Dataset schema (column, feature type, observed range; "stringclasses" columns are categorical strings with the distinct-value count shown, "stringlengths" columns are free strings with min/max length shown):

Column        Type           Range
lang          stringclasses  1 distinct value
license       stringclasses  13 distinct values
stderr        stringlengths  0 to 350 chars
commit        stringlengths  40 chars (fixed length)
returncode    int64          0 to 128
repos         stringlengths  7 to 45.1k chars
new_contents  stringlengths  0 to 1.87M chars
new_file      stringlengths  6 to 292 chars
old_contents  stringlengths  0 to 1.87M chars
message       stringlengths  6 to 9.26k chars
old_file      stringlengths  6 to 292 chars
subject       stringlengths  0 to 4.45k chars
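The example rows below follow this schema. As a usage sketch (assumption: the dataset is published on the Hugging Face Hub; the repository ID below is a hypothetical placeholder, since the dataset's real Hub ID is not given here), the columns can be read with the `datasets` library:

```python
# Sketch only: loading a dataset with the schema above via the Hugging Face
# `datasets` library. "user/java-commit-diffs" is a hypothetical placeholder,
# not the dataset's real Hub ID.
from datasets import load_dataset

ds = load_dataset("user/java-commit-diffs", split="train")

row = ds[0]
print(row["lang"], row["license"], row["commit"])  # commit metadata
print(row["message"])                              # full commit message
print(row["new_file"])                             # file path after the change
before, after = row["old_contents"], row["new_contents"]
print(len(before), len(after))                     # pre/post file sizes
```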

Row 1:
lang: Java
license: apache-2.0
commit: b3bfb2a78bea8ee149bfe1c67a7c7cd1d6d8899a
returncode: 0
repos: gbif/occurrence,gbif/occurrence,gbif/occurrence
new_contents:
package org.gbif.occurrence.search.es; import org.gbif.api.model.common.search.SearchConstants; import org.gbif.api.model.occurrence.search.OccurrenceSearchParameter; import org.gbif.api.model.occurrence.search.OccurrenceSearchRequest; import org.gbif.api.util.VocabularyUtils; import org.gbif.api.vocabulary.Country; import java.io.IOException; import java.util.*; import java.util.function.Function; import java.util.function.IntUnaryOperator; import java.util.stream.Collectors; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.*; import org.elasticsearch.index.query.*; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestBuilders; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKTReader; import static org.gbif.api.util.SearchTypeValidator.isRange; import static org.gbif.occurrence.search.es.EsQueryUtils.*; import static org.gbif.occurrence.search.es.OccurrenceEsField.COORDINATE_SHAPE; import static org.gbif.occurrence.search.es.OccurrenceEsField.FULL_TEXT; public class EsSearchRequestBuilder { private static final int MAX_SIZE_TERMS_AGGS = 1200000; private static final IntUnaryOperator DEFAULT_SHARD_SIZE = size -> (size * 2) + 50000; private EsSearchRequestBuilder() {} public static SearchRequest buildSearchRequest( OccurrenceSearchRequest searchRequest, boolean facetsEnabled, String index) { SearchRequest esRequest = new SearchRequest(); esRequest.indices(index); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); esRequest.source(searchSourceBuilder); // size and offset searchSourceBuilder.size(searchRequest.getLimit()); searchSourceBuilder.from((int) searchRequest.getOffset()); // sort if (!Strings.isNullOrEmpty(searchRequest.getQ())) { searchSourceBuilder.sort(SortBuilders.scoreSort()); } // group params GroupedParams groupedParams = groupParameters(searchRequest); // add query buildQuery(groupedParams.queryParams, searchRequest.getQ()) .ifPresent(searchSourceBuilder::query); // add aggs buildAggs(searchRequest, groupedParams.postFilterParams, facetsEnabled) .ifPresent(aggsList -> aggsList.forEach(searchSourceBuilder::aggregation)); // post-filter buildPostFilter(groupedParams.postFilterParams).ifPresent(searchSourceBuilder::postFilter); return esRequest; } public static Optional<QueryBuilder> buildQueryNode(OccurrenceSearchRequest searchRequest) { return buildQuery(searchRequest.getParameters(), searchRequest.getQ()); } static SearchRequest buildSuggestQuery( String prefix, OccurrenceSearchParameter parameter, Integer limit, String index) { SearchRequest request = new SearchRequest(); request.indices(index); SearchSourceBuilder searchSourceBuilder = new 
SearchSourceBuilder(); request.source(searchSourceBuilder); OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(parameter); // create suggest query searchSourceBuilder.suggest( new SuggestBuilder() .addSuggestion( esField.getFieldName(), SuggestBuilders.completionSuggestion(esField.getFieldName() + ".suggest") .prefix(prefix) .size(limit != null ? limit : SearchConstants.DEFAULT_SUGGEST_LIMIT) .skipDuplicates(true))); // add source field searchSourceBuilder.fetchSource(esField.getFieldName(), null); return request; } private static Optional<QueryBuilder> buildQuery( Multimap<OccurrenceSearchParameter, String> params, String qParam) { // create bool node BoolQueryBuilder bool = QueryBuilders.boolQuery(); // adding full text search parameter if (!Strings.isNullOrEmpty(qParam)) { bool.must(QueryBuilders.matchQuery(FULL_TEXT.getFieldName(), qParam)); } if (params != null && !params.isEmpty()) { // adding geometry to bool if (params.containsKey(OccurrenceSearchParameter.GEOMETRY)) { BoolQueryBuilder shouldGeometry = QueryBuilders.boolQuery(); shouldGeometry .should() .addAll( params.get(OccurrenceSearchParameter.GEOMETRY).stream() .map(EsSearchRequestBuilder::buildGeoShapeQuery) .collect(Collectors.toList())); bool.filter().add(shouldGeometry); } // adding term queries to bool bool.filter() .addAll( params.asMap().entrySet().stream() .filter(e -> Objects.nonNull(SEARCH_TO_ES_MAPPING.get(e.getKey()))) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); } return bool.must().isEmpty() && bool.filter().isEmpty() ? Optional.empty() : Optional.of(bool); } @VisibleForTesting static GroupedParams groupParameters(OccurrenceSearchRequest searchRequest) { GroupedParams groupedParams = new GroupedParams(); if (!searchRequest.isMultiSelectFacets() || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { groupedParams.queryParams = searchRequest.getParameters(); return groupedParams; } groupedParams.queryParams = ArrayListMultimap.create(); groupedParams.postFilterParams = ArrayListMultimap.create(); searchRequest .getParameters() .asMap() .forEach( (k, v) -> { if (searchRequest.getFacets().contains(k)) { groupedParams.postFilterParams.putAll(k, v); } else { groupedParams.queryParams.putAll(k, v); } }); return groupedParams; } private static Optional<QueryBuilder> buildPostFilter( Multimap<OccurrenceSearchParameter, String> postFilterParams) { if (postFilterParams == null || postFilterParams.isEmpty()) { return Optional.empty(); } BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); return Optional.of(bool); } private static Optional<List<AggregationBuilder>> buildAggs( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> postFilterParams, boolean facetsEnabled) { if (!facetsEnabled || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { return Optional.empty(); } if (searchRequest.isMultiSelectFacets() && postFilterParams != null && !postFilterParams.isEmpty()) { return Optional.of(buildFacetsMultiselect(searchRequest, postFilterParams)); } return Optional.of(buildFacets(searchRequest)); } private static List<AggregationBuilder> buildFacetsMultiselect( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> 
postFilterParams) { if (searchRequest.getFacets().size() == 1) { // same case as normal facets return buildFacets(searchRequest); } return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { // build filter aggs BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .filter(entry -> entry.getKey() != facetParam) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); // add filter to the aggs OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); FilterAggregationBuilder filterAggs = AggregationBuilders.filter(esField.getFieldName(), bool); // build terms aggs and add it to the filter aggs TermsAggregationBuilder termsAggs = buildTermsAggs( "filtered_" + esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); filterAggs.subAggregation(termsAggs); return filterAggs; }) .collect(Collectors.toList()); } private static List<AggregationBuilder> buildFacets(OccurrenceSearchRequest searchRequest) { return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); return buildTermsAggs( esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); }) .collect(Collectors.toList()); } private static TermsAggregationBuilder buildTermsAggs( String aggsName, OccurrenceEsField esField, int facetOffset, int facetLimit, Integer minCount) { // build aggs for the field TermsAggregationBuilder termsAggsBuilder = AggregationBuilders.terms(aggsName).field(esField.getFieldName()); // min count Optional.ofNullable(minCount).ifPresent(termsAggsBuilder::minDocCount); // aggs size int size = calculateAggsSize(esField, facetOffset, facetLimit); termsAggsBuilder.size(size); // aggs shard size termsAggsBuilder.shardSize( CARDINALITIES.getOrDefault(esField, DEFAULT_SHARD_SIZE.applyAsInt(size))); return termsAggsBuilder; } private static int calculateAggsSize(OccurrenceEsField esField, int facetOffset, int facetLimit) { int maxCardinality = CARDINALITIES.getOrDefault(esField, Integer.MAX_VALUE); // the limit is bounded by the max cardinality of the field int limit = Math.min(facetOffset + facetLimit, maxCardinality); // we set a maximum limit for performance reasons if (limit > MAX_SIZE_TERMS_AGGS) { throw new IllegalArgumentException( "Facets paging is only supported up to " + MAX_SIZE_TERMS_AGGS + " elements"); } return limit; } /** * Mapping parameter values into know values for Enums. Non-enum parameter values are passed using * its raw value. 
*/ private static String parseParamValue(String value, OccurrenceSearchParameter parameter) { if (Enum.class.isAssignableFrom(parameter.type()) && !Country.class.isAssignableFrom(parameter.type())) { return VocabularyUtils.lookup(value, (Class<Enum<?>>) parameter.type()) .transform(Enum::name) .orNull(); } if (Boolean.class.isAssignableFrom(parameter.type())) { return value.toLowerCase(); } return value; } private static List<QueryBuilder> buildTermQuery( Collection<String> values, OccurrenceSearchParameter param, OccurrenceEsField esField) { List<QueryBuilder> queries = new ArrayList<>(); // collect queries for each value List<String> parsedValues = new ArrayList<>(); for (String value : values) { if (isRange(value)) { queries.add(buildRangeQuery(esField, value)); continue; } parsedValues.add(parseParamValue(value, param)); } if (parsedValues.size() == 1) { // single term queries.add(QueryBuilders.termQuery(esField.getFieldName(), parsedValues.get(0))); } else if (parsedValues.size() > 1) { // multi term query queries.add(QueryBuilders.termsQuery(esField.getFieldName(), parsedValues)); } return queries; } private static RangeQueryBuilder buildRangeQuery(OccurrenceEsField esField, String value) { String[] values = value.split(RANGE_SEPARATOR); RangeQueryBuilder builder = QueryBuilders.rangeQuery(esField.getFieldName()); if (!RANGE_WILDCARD.equals(values[0])) { builder.gte(values[0]); } if (!RANGE_WILDCARD.equals(values[1])) { builder.lte(values[1]); } return builder; } public static GeoShapeQueryBuilder buildGeoShapeQuery(String wkt) { Geometry geometry; try { geometry = new WKTReader().read(wkt); } catch (ParseException e) { throw new IllegalArgumentException(e.getMessage(), e); } Function<Polygon, PolygonBuilder> polygonToBuilder = polygon -> { PolygonBuilder polygonBuilder = new PolygonBuilder( new CoordinatesBuilder() .coordinates( normalizePolygonCoordinates(polygon.getExteriorRing().getCoordinates()))); for (int i = 0; i < polygon.getNumInteriorRing(); i++) { polygonBuilder.hole( new LineStringBuilder( new CoordinatesBuilder() .coordinates( normalizePolygonCoordinates( polygon.getInteriorRingN(i).getCoordinates())))); } return polygonBuilder; }; String type = "LinearRing".equals(geometry.getGeometryType()) ? "LINESTRING" : geometry.getGeometryType().toUpperCase(); ShapeBuilder shapeBuilder = null; if (("POINT").equals(type)) { shapeBuilder = new PointBuilder(geometry.getCoordinate().x, geometry.getCoordinate().y); } else if ("LINESTRING".equals(type)) { shapeBuilder = new LineStringBuilder(Arrays.asList(geometry.getCoordinates())); } else if ("POLYGON".equals(type)) { shapeBuilder = polygonToBuilder.apply((Polygon) geometry); } else if ("MULTIPOLYGON".equals(type)) { // multipolygon MultiPolygonBuilder multiPolygonBuilder = new MultiPolygonBuilder(); for (int i = 0; i < geometry.getNumGeometries(); i++) { multiPolygonBuilder.polygon(polygonToBuilder.apply((Polygon) geometry.getGeometryN(i))); } shapeBuilder = multiPolygonBuilder; } else { throw new IllegalArgumentException(type + " shape is not supported"); } try { return QueryBuilders.geoShapeQuery(COORDINATE_SHAPE.getFieldName(), shapeBuilder) .relation(ShapeRelation.WITHIN); } catch (IOException e) { throw new IllegalStateException(e.getMessage(), e); } } /** * Eliminates duplicates but discarding the first and the last coordinates. 
The order must be * preserved */ private static Coordinate[] normalizePolygonCoordinates(Coordinate[] coordinates) { List<Coordinate> normalizedCoordinates = new ArrayList<>(); normalizedCoordinates.add(0, coordinates[0]); int i = 1; Set<Coordinate> uniqueIntermediateCoords = new HashSet<>(); for (int j = 1; j < coordinates.length - 1; j++) { Coordinate jCoord = coordinates[j]; if (!uniqueIntermediateCoords.contains(jCoord)) { uniqueIntermediateCoords.add(jCoord); normalizedCoordinates.add(i++, jCoord); } } normalizedCoordinates.add(i, coordinates[coordinates.length - 1]); return normalizedCoordinates.toArray(new Coordinate[0]); } @VisibleForTesting static class GroupedParams { Multimap<OccurrenceSearchParameter, String> postFilterParams; Multimap<OccurrenceSearchParameter, String> queryParams; } }
new_file: occurrence-search/src/main/java/org/gbif/occurrence/search/es/EsSearchRequestBuilder.java
old_contents:
package org.gbif.occurrence.search.es; import org.gbif.api.model.common.search.SearchConstants; import org.gbif.api.model.occurrence.search.OccurrenceSearchParameter; import org.gbif.api.model.occurrence.search.OccurrenceSearchRequest; import org.gbif.api.util.VocabularyUtils; import org.gbif.api.vocabulary.Country; import java.io.IOException; import java.util.*; import java.util.function.Function; import java.util.function.IntUnaryOperator; import java.util.stream.Collectors; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.*; import org.elasticsearch.index.query.*; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestBuilders; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKTReader; import static org.gbif.api.util.SearchTypeValidator.isRange; import static org.gbif.occurrence.search.es.EsQueryUtils.*; import static org.gbif.occurrence.search.es.OccurrenceEsField.COORDINATE_SHAPE; import static org.gbif.occurrence.search.es.OccurrenceEsField.FULL_TEXT; public class EsSearchRequestBuilder { private static final int MAX_SIZE_TERMS_AGGS = 1200000; private static final IntUnaryOperator DEFAULT_SHARD_SIZE = size -> (size * 2) + 50000; private EsSearchRequestBuilder() {} public static SearchRequest buildSearchRequest( OccurrenceSearchRequest searchRequest, boolean facetsEnabled, String index) { SearchRequest esRequest = new SearchRequest(); esRequest.indices(index); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); esRequest.source(searchSourceBuilder); // size and offset searchSourceBuilder.size(searchRequest.getLimit()); searchSourceBuilder.from((int) searchRequest.getOffset()); // sort if (!Strings.isNullOrEmpty(searchRequest.getQ())) { searchSourceBuilder.sort(SortBuilders.scoreSort()); } // group params GroupedParams groupedParams = groupParameters(searchRequest); // add query buildQuery(groupedParams.queryParams, searchRequest.getQ()) .ifPresent(searchSourceBuilder::query); // add aggs buildAggs(searchRequest, groupedParams.postFilterParams, facetsEnabled) .ifPresent(aggsList -> aggsList.forEach(searchSourceBuilder::aggregation)); // post-filter buildPostFilter(groupedParams.postFilterParams).ifPresent(searchSourceBuilder::postFilter); return esRequest; } public static Optional<QueryBuilder> buildQueryNode(OccurrenceSearchRequest searchRequest) { return buildQuery(searchRequest.getParameters(), searchRequest.getQ()); } static SearchRequest buildSuggestQuery( String prefix, OccurrenceSearchParameter parameter, Integer limit, String index) { SearchRequest request = new SearchRequest(); request.indices(index); SearchSourceBuilder searchSourceBuilder = new 
SearchSourceBuilder(); request.source(searchSourceBuilder); OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(parameter); // create suggest query searchSourceBuilder.suggest( new SuggestBuilder() .addSuggestion( esField.getFieldName(), SuggestBuilders.completionSuggestion(esField.getFieldName() + ".suggest") .prefix(prefix) .size(limit != null ? limit : SearchConstants.DEFAULT_SUGGEST_LIMIT) .skipDuplicates(true))); // add source field searchSourceBuilder.fetchSource(esField.getFieldName(), null); return request; } private static Optional<QueryBuilder> buildQuery( Multimap<OccurrenceSearchParameter, String> params, String qParam) { // create bool node BoolQueryBuilder bool = QueryBuilders.boolQuery(); // adding full text search parameter if (!Strings.isNullOrEmpty(qParam)) { bool.must(QueryBuilders.matchQuery(FULL_TEXT.getFieldName(), qParam)); } if (params != null && !params.isEmpty()) { // adding geometry to bool if (params.containsKey(OccurrenceSearchParameter.GEOMETRY)) { BoolQueryBuilder shouldGeometry = QueryBuilders.boolQuery(); shouldGeometry .should() .addAll( params.get(OccurrenceSearchParameter.GEOMETRY).stream() .map(EsSearchRequestBuilder::buildGeoShapeQuery) .collect(Collectors.toList())); bool.filter().add(shouldGeometry); } // adding term queries to bool bool.filter() .addAll( params.asMap().entrySet().stream() .filter(e -> Objects.nonNull(SEARCH_TO_ES_MAPPING.get(e.getKey()))) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); } return bool.must().isEmpty() && bool.filter().isEmpty() ? Optional.empty() : Optional.of(bool); } @VisibleForTesting static GroupedParams groupParameters(OccurrenceSearchRequest searchRequest) { GroupedParams groupedParams = new GroupedParams(); if (!searchRequest.isMultiSelectFacets() || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { groupedParams.queryParams = searchRequest.getParameters(); return groupedParams; } groupedParams.queryParams = ArrayListMultimap.create(); groupedParams.postFilterParams = ArrayListMultimap.create(); searchRequest .getParameters() .asMap() .forEach( (k, v) -> { if (searchRequest.getFacets().contains(k)) { groupedParams.postFilterParams.putAll(k, v); } else { groupedParams.queryParams.putAll(k, v); } }); return groupedParams; } private static Optional<QueryBuilder> buildPostFilter( Multimap<OccurrenceSearchParameter, String> postFilterParams) { if (postFilterParams == null || postFilterParams.isEmpty()) { return Optional.empty(); } BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); return Optional.of(bool); } private static Optional<List<AggregationBuilder>> buildAggs( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> postFilterParams, boolean facetsEnabled) { if (!facetsEnabled || searchRequest.getFacets() == null || searchRequest.getFacets().isEmpty()) { return Optional.empty(); } if (searchRequest.isMultiSelectFacets() && postFilterParams != null && !postFilterParams.isEmpty()) { return Optional.of(buildFacetsMultiselect(searchRequest, postFilterParams)); } return Optional.of(buildFacets(searchRequest)); } private static List<AggregationBuilder> buildFacetsMultiselect( OccurrenceSearchRequest searchRequest, Multimap<OccurrenceSearchParameter, String> 
postFilterParams) { if (searchRequest.getFacets().size() == 1) { // same case as normal facets return buildFacets(searchRequest); } return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { // build filter aggs BoolQueryBuilder bool = QueryBuilders.boolQuery(); bool.filter() .addAll( postFilterParams.asMap().entrySet().stream() .filter(entry -> entry.getKey() != facetParam) .flatMap( e -> buildTermQuery( e.getValue(), e.getKey(), SEARCH_TO_ES_MAPPING.get(e.getKey())) .stream()) .collect(Collectors.toList())); // add filter to the aggs OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); FilterAggregationBuilder filterAggs = AggregationBuilders.filter(esField.getFieldName(), bool); // build terms aggs and add it to the filter aggs TermsAggregationBuilder termsAggs = buildTermsAggs( "filtered_" + esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); filterAggs.subAggregation(termsAggs); return filterAggs; }) .collect(Collectors.toList()); } private static List<AggregationBuilder> buildFacets(OccurrenceSearchRequest searchRequest) { return searchRequest.getFacets().stream() .filter(p -> SEARCH_TO_ES_MAPPING.get(p) != null) .map( facetParam -> { OccurrenceEsField esField = SEARCH_TO_ES_MAPPING.get(facetParam); return buildTermsAggs( esField.getFieldName(), esField, extractFacetOffset(searchRequest, facetParam), extractFacetLimit(searchRequest, facetParam), searchRequest.getFacetMinCount()); }) .collect(Collectors.toList()); } private static TermsAggregationBuilder buildTermsAggs( String aggsName, OccurrenceEsField esField, int facetOffset, int facetLimit, Integer minCount) { // build aggs for the field TermsAggregationBuilder termsAggsBuilder = AggregationBuilders.terms(aggsName).field(esField.getFieldName()); // min count Optional.ofNullable(minCount).ifPresent(termsAggsBuilder::minDocCount); // aggs size int size = calculateAggsSize(esField, facetOffset, facetLimit); termsAggsBuilder.size(size); // aggs shard size termsAggsBuilder.shardSize( CARDINALITIES.getOrDefault(esField, DEFAULT_SHARD_SIZE.applyAsInt(size))); return termsAggsBuilder; } private static int calculateAggsSize(OccurrenceEsField esField, int facetOffset, int facetLimit) { int maxCardinality = CARDINALITIES.getOrDefault(esField, Integer.MAX_VALUE); // the limit is bounded by the max cardinality of the field int limit = Math.min(facetOffset + facetLimit, maxCardinality); // we set a maximum limit for performance reasons if (limit > MAX_SIZE_TERMS_AGGS) { throw new IllegalArgumentException( "Facets paging is only supported up to " + MAX_SIZE_TERMS_AGGS + " elements"); } return limit; } /** * Mapping parameter values into know values for Enums. Non-enum parameter values are passed using * its raw value. 
*/ private static String parseParamValue(String value, OccurrenceSearchParameter parameter) { if (Enum.class.isAssignableFrom(parameter.type()) && !Country.class.isAssignableFrom(parameter.type())) { return VocabularyUtils.lookup(value, (Class<Enum<?>>) parameter.type()) .transform(Enum::name) .orNull(); } if (Boolean.class.isAssignableFrom(parameter.type())) { return value.toLowerCase(); } return value; } private static List<QueryBuilder> buildTermQuery( Collection<String> values, OccurrenceSearchParameter param, OccurrenceEsField esField) { List<QueryBuilder> queries = new ArrayList<>(); // collect queries for each value List<String> parsedValues = new ArrayList<>(); for (String value : values) { if (isRange(value)) { queries.add(buildRangeQuery(esField, value)); continue; } parsedValues.add(parseParamValue(value, param)); } if (parsedValues.size() == 1) { // single term queries.add(QueryBuilders.termQuery(esField.getFieldName(), parsedValues.get(0))); } else if (parsedValues.size() > 1) { // multi term query queries.add(QueryBuilders.termsQuery(esField.getFieldName(), parsedValues)); } return queries; } private static RangeQueryBuilder buildRangeQuery(OccurrenceEsField esField, String value) { String[] values = value.split(RANGE_SEPARATOR); RangeQueryBuilder builder = QueryBuilders.rangeQuery(esField.getFieldName()); if (!RANGE_WILDCARD.equals(values[0])) { builder.gte(values[0]); } if (!RANGE_WILDCARD.equals(values[1])) { builder.lte(values[1]); } return builder; } public static GeoShapeQueryBuilder buildGeoShapeQuery(String wkt) { Geometry geometry; try { geometry = new WKTReader().read(wkt); } catch (ParseException e) { throw new IllegalArgumentException(e.getMessage(), e); } Function<Polygon, PolygonBuilder> polygonToBuilder = polygon -> { PolygonBuilder polygonBuilder = new PolygonBuilder( new CoordinatesBuilder() .coordinates( normalizePolygonCoordinates(polygon.getExteriorRing().getCoordinates()))); for (int i = 0; i < polygon.getNumInteriorRing(); i++) { polygonBuilder.hole( new LineStringBuilder( new CoordinatesBuilder() .coordinates( normalizePolygonCoordinates( polygon.getInteriorRingN(i).getCoordinates())))); } return polygonBuilder; }; String type = "LinearRing".equals(geometry.getGeometryType()) ? "LINESTRING" : geometry.getGeometryType().toUpperCase(); ShapeBuilder shapeBuilder = null; if (("POINT").equals(type)) { shapeBuilder = new PointBuilder(geometry.getCoordinate().x, geometry.getCoordinate().y); } else if ("LINESTRING".equals(type)) { shapeBuilder = new LineStringBuilder(Arrays.asList(geometry.getCoordinates())); } else if ("POLYGON".equals(type)) { shapeBuilder = polygonToBuilder.apply((Polygon) geometry); } else if ("MULTIPOLYGON".equals(type)) { // multipolygon MultiPolygonBuilder multiPolygonBuilder = new MultiPolygonBuilder(); for (int i = 0; i < geometry.getNumGeometries(); i++) { multiPolygonBuilder.polygon(polygonToBuilder.apply((Polygon) geometry.getGeometryN(i))); } shapeBuilder = multiPolygonBuilder; } else { throw new IllegalArgumentException(type + " shape is not supported"); } try { return QueryBuilders.geoShapeQuery(COORDINATE_SHAPE.getFieldName(), shapeBuilder) .relation(ShapeRelation.WITHIN); } catch (IOException e) { throw new IllegalStateException(e.getMessage(), e); } } /** * Eliminates duplicates but discarding the first and the last coordinates. 
The order must be * preserved */ private static Coordinate[] normalizePolygonCoordinates(Coordinate[] coordinates) { Set<Coordinate> uniqueIntermediateCoords = new HashSet<>(); List<Coordinate> normalizedCoordinates = new ArrayList<>(); normalizedCoordinates.add(0, coordinates[0]); int i = 1; for (int j = 1; j < coordinates.length - 1; j++) { Coordinate jCoord = coordinates[j]; if (!uniqueIntermediateCoords.contains(jCoord)) { uniqueIntermediateCoords.add(jCoord); normalizedCoordinates.add(i++, jCoord); } } normalizedCoordinates.add(i, coordinates[coordinates.length - 1]); return normalizedCoordinates.toArray(new Coordinate[0]); } @VisibleForTesting static class GroupedParams { Multimap<OccurrenceSearchParameter, String> postFilterParams; Multimap<OccurrenceSearchParameter, String> queryParams; } }
message: cleanup
old_file: occurrence-search/src/main/java/org/gbif/occurrence/search/es/EsSearchRequestBuilder.java
subject: cleanup

Row 2:
lang: Java
license: apache-2.0
commit: fb3250356daf63603c834536d20390df9df2d5e5
returncode: 0
repos: getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java
new_contents:
package org.lantern.simple; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpRequest; import java.net.InetSocketAddress; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.lantern.Cli; import org.lantern.JsonUtils; import org.lantern.LanternUtils; import org.lantern.geoip.GeoIpLookupService; import org.lantern.monitoring.Stats; import org.lantern.monitoring.StatsManager; import org.lantern.monitoring.StatshubAPI; import org.lantern.proxy.GiveModeHttpFilters; import org.lantern.proxy.pt.PluggableTransport; import org.lantern.proxy.pt.PluggableTransports; import org.lantern.proxy.pt.PtType; import org.lantern.state.InstanceStats; import org.lantern.util.Threads; import org.littleshoot.proxy.HttpFilters; import org.littleshoot.proxy.HttpFiltersSourceAdapter; import org.littleshoot.proxy.HttpProxyServer; import org.littleshoot.proxy.HttpProxyServerBootstrap; import org.littleshoot.proxy.TransportProtocol; import org.littleshoot.proxy.impl.DefaultHttpProxyServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <p> * A really basic Give mode proxy that listens with both TCP and UDT and trusts * all Get proxies. This proxy is useful for experimentation and is also used * for fallback proxies. * </p> * * <p> * Run like this: * </p> * * <pre> * ./launch -Xmx400m org.lantern.simple.Give -instanceid mytestfallback -host 127.0.0.1 -http 46000 -https 46001 -udt 46002 -keystore ../too-many-secrets/littleproxy_keystore.jks -authtoken '534#^#$523590)' * </pre> * * <pre> * usage: ./launch org.lantern.simple.Give [options] * -authtoken <arg> Auth token that this proxy requires from its * clients. Defaults to '534#^#$523590)'. * -host <arg> (Required) The proxy's public hostname or ip * address * -http <arg> HTTP listen port. Defaults to 80. * -https <arg> HTTPS listen port. Defaults to 443. * -instanceid <arg> The instanceid. If specified, stats will be * reported under this instance id. Otherwise, * stats will not be reported. * -keystore <arg> Path to keystore containing proxy's cert. * Defaults to * ../too-many-secrets/littleproxy_keystore.jks * --pt <property=value> (Optional) Specify pluggable transport * properties * -udt <arg> UDT listen port. Defaults to 9090. 
* </pre> */ public class Give extends CliProgram { private static final Logger LOGGER = LoggerFactory.getLogger(Give.class); private static final String OPT_HOST = "host"; private static final String OPT_HTTP_PORT = "http"; private static final String OPT_HTTPS_PORT = "https"; private static final String OPT_UDT_PORT = "udt"; private static final String OPT_KEYSTORE = "keystore"; private static final String OPT_AUTHTOKEN = "authtoken"; private static final String OPT_INSTANCE_ID = "instanceid"; private static final String OPT_PT = "pt"; private String host; private int httpsPort; private int httpPort; private Integer udtPort; private String keyStorePath; private String expectedAuthToken; private String instanceId; private PluggableTransport pt; private HttpProxyServer server; private InstanceStats stats = new InstanceStats(); private GeoIpLookupService lookupService = new GeoIpLookupService(); private FallbackActivityTracker activityTracker = new FallbackActivityTracker( stats, lookupService); private final StatshubAPI statshub = new StatshubAPI(); private final ScheduledExecutorService statsScheduler = Threads .newSingleThreadScheduledExecutor("PostStats"); public static void main(String[] args) throws Exception { new Give(args).start(); } public Give(String[] args) { super(args); LanternUtils.setFallbackProxy(true); this.host = cmd.getOptionValue(OPT_HOST); this.httpPort = Integer.parseInt(cmd .getOptionValue(OPT_HTTP_PORT, "80")); this.httpsPort = Integer.parseInt(cmd.getOptionValue(OPT_HTTPS_PORT, "443")); if (cmd.hasOption(OPT_UDT_PORT)) { this.udtPort = Integer.parseInt(cmd.getOptionValue(OPT_UDT_PORT)); } this.keyStorePath = cmd.getOptionValue(OPT_KEYSTORE, "../too-many-secrets/littleproxy_keystore.jks"); this.expectedAuthToken = cmd.getOptionValue(OPT_AUTHTOKEN, "534#^#$523590)"); this.instanceId = cmd.getOptionValue(OPT_INSTANCE_ID); if (cmd.hasOption(OPT_PT)) { initPluggableTransport(cmd .getOptionProperties(Cli.OPTION_PLUGGABLE_TRANSPORT)); } } private void initPluggableTransport(Properties props) { String type = props.getProperty("type"); if (type != null) { PtType proxyPtType = PtType.valueOf(type.toUpperCase()); pt = PluggableTransports.newTransport( proxyPtType, props); LOGGER.info("Using pluggable transport of type {} ", proxyPtType); } } public void start() { LOGGER.info(String .format("Starting Give proxy with the following settings ...\n" + "Host: %1$s\n" + "HTTP port: %2$s\n" + "HTTPS port: %3$s\n" + "UDT port: %4$s\n" + "Keystore path: %5$s\n" + "Auth token: %6$s\n" + "Instance Id: %7$s\n", host, httpPort, httpsPort, udtPort, keyStorePath, expectedAuthToken, instanceId)); startTcp(); if (udtPort != null) { startUdt(); } if (instanceId != null) { startStats(); } } protected void initializeCliOptions() { //@formatter:off addOption(new Option(OPT_HOST, true, "(Required) The proxy's public hostname or ip address"), true); addOption(new Option(OPT_HTTP_PORT, true, "HTTP listen port. Defaults to 80."), false); addOption(new Option(OPT_HTTPS_PORT, true, "HTTPS listen port. Defaults to 443."), false); addOption(new Option(OPT_UDT_PORT, true, "UDT listen port. If not specified, proxy does not listen for UDT connections."), false); addOption(new Option(OPT_KEYSTORE, true, "Path to keystore containing proxy's cert. Defaults to ../too-many-secrets/littleproxy_keystore.jks"), false); addOption(new Option(OPT_AUTHTOKEN, true, "Auth token that this proxy requires from its clients. Defaults to '534#^#$523590)'."), false); addOption(new Option(OPT_INSTANCE_ID, true, "The instanceid. 
If specified, stats will be reported under this instance id. Otherwise, stats will not be reported."), false); options.addOption(OptionBuilder .withLongOpt(OPT_PT) .withArgName("property=value") .hasArgs(2) .withValueSeparator() .withDescription("(Optional) Specify pluggable transport properties") .create()); //@formatter:on } private void startTcp() { LOGGER.info("Starting Plain Text Give proxy at TCP port {}", httpPort); DefaultHttpProxyServer.bootstrap() .withName("Give-PlainText") .withPort(httpPort) .withAllowLocalOnly(false) .withListenOnAllAddresses(true) // Use a filter to respond with 404 to http requests .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, host, httpPort, TransportProtocol.TCP, expectedAuthToken); } }) .plusActivityTracker(activityTracker) .start(); int serverPort = httpsPort; boolean allowLocalOnly = false; boolean encryptionRequired = true; if (pt != null) { // When using a pluggable transport, the transport will use the // configured port and the server will use some random free port // that only allows local connections serverPort = LanternUtils.findFreePort(); allowLocalOnly = true; encryptionRequired = !pt.suppliesEncryption(); } LOGGER.info( "Starting TLS Give proxy at TCP port {}", httpsPort); HttpProxyServerBootstrap bootstrap = DefaultHttpProxyServer.bootstrap() .withName("Give-Encrypted") .withPort(serverPort) .withAllowLocalOnly(allowLocalOnly) .withListenOnAllAddresses(true) .withAuthenticateSslClients(false) // Use a filter to deny requests other than those contains the // right auth token .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, host, httpsPort, TransportProtocol.TCP, expectedAuthToken); } }) .plusActivityTracker(activityTracker); if (encryptionRequired) { bootstrap.withSslEngineSource( new SimpleSslEngineSource(keyStorePath)); } server = bootstrap.start(); if (pt != null) { LOGGER.info("Starting PluggableTransport"); InetSocketAddress giveModeAddress = server.getListenAddress(); pt.startServer(httpsPort, giveModeAddress); } } private void startUdt() { LOGGER.info("Starting Give proxy at UDT port {}", udtPort); server.clone() .withName("Give-UDT") .withPort(udtPort) .withTransportProtocol(TransportProtocol.UDT) // Use a filter to deny requests other than those contains the // right auth token .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, host, udtPort, TransportProtocol.UDT, expectedAuthToken); } }) .start(); } private void startStats() { LOGGER.info( "Starting to report stats to statshub under instanceid: {}", instanceId); statsScheduler.scheduleAtFixedRate( postStats, 10, StatsManager.FALLBACK_POST_INTERVAL, TimeUnit.SECONDS); statsScheduler.scheduleAtFixedRate( postSiteStats, StatsManager.SITES_POST_INTERVAL, StatsManager.SITES_POST_INTERVAL, TimeUnit.SECONDS); } private Runnable postStats = new Runnable() { @Override public void run() { try { Stats instanceStats = stats.toInstanceStats(); StatsManager.addSystemStats(instanceStats); statshub.postInstanceStats( instanceId, null, StatsManager.UNKNOWN_COUNTRY, true, instanceStats); if (LOGGER.isDebugEnabled()) { 
LOGGER.debug("Posted stats:\n\n{}\n", JsonUtils.jsonify(instanceStats)); } } catch (Exception e) { LOGGER.warn("Unable to post stats to statshub: {}", e.getMessage(), e); } } }; private Runnable postSiteStats = new Runnable() { @Override public void run() { LOGGER.info("Reporting host requests to statshub"); Map<String, Map<String, Long>> hostRequestsByCountry = activityTracker.pollHostRequestsByCountry(); try { String statName = Stats.Counters.requestsToHost.name(); for (Map.Entry<String, Map<String, Long>> hostRequestsForCountry : hostRequestsByCountry .entrySet()) { String country = hostRequestsForCountry.getKey(); if (country == null || "".equals(country)) { country = Stats.UNKNOWN_COUNTRY; } String countryStatName = statName + "_" + country.toLowerCase(); for (Map.Entry<String, Long> hostRequests : hostRequestsForCountry .getValue().entrySet()) { String host = hostRequests.getKey(); long requests = hostRequests.getValue(); String id = "host_" + host; Stats stats = new Stats(); stats.getIncrements().put(statName, requests); stats.getIncrements().put(countryStatName, requests); Map<String, String> dims = new HashMap<String, String>(); dims.put("host", host); statshub.postStats(id, null, country, false, stats, dims); } } } catch (Exception e) { LOGGER.warn("Unable to post site stats to statshub: {}", e.getMessage(), e); } } }; }
new_file: src/main/java/org/lantern/simple/Give.java
old_contents:
package org.lantern.simple; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpRequest; import java.net.InetSocketAddress; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.lantern.Cli; import org.lantern.JsonUtils; import org.lantern.LanternUtils; import org.lantern.geoip.GeoIpLookupService; import org.lantern.monitoring.Stats; import org.lantern.monitoring.StatsManager; import org.lantern.monitoring.StatshubAPI; import org.lantern.proxy.GiveModeHttpFilters; import org.lantern.proxy.pt.PluggableTransport; import org.lantern.proxy.pt.PluggableTransports; import org.lantern.proxy.pt.PtType; import org.lantern.state.InstanceStats; import org.lantern.util.Threads; import org.littleshoot.proxy.HttpFilters; import org.littleshoot.proxy.HttpFiltersSourceAdapter; import org.littleshoot.proxy.HttpProxyServer; import org.littleshoot.proxy.HttpProxyServerBootstrap; import org.littleshoot.proxy.TransportProtocol; import org.littleshoot.proxy.impl.DefaultHttpProxyServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <p> * A really basic Give mode proxy that listens with both TCP and UDT and trusts * all Get proxies. This proxy is useful for experimentation and is also used * for fallback proxies. * </p> * * <p> * Run like this: * </p> * * <pre> * ./launch -Xmx400m org.lantern.simple.Give -instanceid mytestfallback -host 127.0.0.1 -http 46000 -https 46001 -udt 46002 -keystore ../too-many-secrets/littleproxy_keystore.jks -authtoken '534#^#$523590)' * </pre> * * <pre> * usage: ./launch org.lantern.simple.Give [options] * -authtoken <arg> Auth token that this proxy requires from its * clients. Defaults to '534#^#$523590)'. * -host <arg> (Required) The proxy's public hostname or ip * address * -http <arg> HTTP listen port. Defaults to 80. * -https <arg> HTTPS listen port. Defaults to 443. * -instanceid <arg> The instanceid. If specified, stats will be * reported under this instance id. Otherwise, * stats will not be reported. * -keystore <arg> Path to keystore containing proxy's cert. * Defaults to * ../too-many-secrets/littleproxy_keystore.jks * --pt <property=value> (Optional) Specify pluggable transport * properties * -udt <arg> UDT listen port. Defaults to 9090. 
* </pre> */ public class Give extends CliProgram { private static final Logger LOGGER = LoggerFactory.getLogger(Give.class); private static final String OPT_HOST = "host"; private static final String OPT_HTTP_PORT = "http"; private static final String OPT_HTTPS_PORT = "https"; private static final String OPT_UDT_PORT = "udt"; private static final String OPT_KEYSTORE = "keystore"; private static final String OPT_AUTHTOKEN = "authtoken"; private static final String OPT_INSTANCE_ID = "instanceid"; private static final String OPT_PT = "pt"; private String host; private int httpsPort; private int httpPort; private Integer udtPort; private String keyStorePath; private String expectedAuthToken; private String instanceId; private PluggableTransport pt; private HttpProxyServer server; private InstanceStats stats = new InstanceStats(); private GeoIpLookupService lookupService = new GeoIpLookupService(); private FallbackActivityTracker activityTracker = new FallbackActivityTracker( stats, lookupService); private final StatshubAPI statshub = new StatshubAPI(); private final ScheduledExecutorService statsScheduler = Threads .newSingleThreadScheduledExecutor("PostStats"); public static void main(String[] args) throws Exception { new Give(args).start(); } public Give(String[] args) { super(args); LanternUtils.setFallbackProxy(true); this.host = cmd.getOptionValue(OPT_HOST); this.httpPort = Integer.parseInt(cmd .getOptionValue(OPT_HTTP_PORT, "80")); this.httpsPort = Integer.parseInt(cmd.getOptionValue(OPT_HTTPS_PORT, "443")); if (cmd.hasOption(OPT_UDT_PORT)) { this.udtPort = Integer.parseInt(cmd.getOptionValue(OPT_UDT_PORT)); } this.keyStorePath = cmd.getOptionValue(OPT_KEYSTORE, "../too-many-secrets/littleproxy_keystore.jks"); this.expectedAuthToken = cmd.getOptionValue(OPT_AUTHTOKEN, "534#^#$523590)"); this.instanceId = cmd.getOptionValue(OPT_INSTANCE_ID); if (cmd.hasOption(OPT_PT)) { initPluggableTransport(cmd .getOptionProperties(Cli.OPTION_PLUGGABLE_TRANSPORT)); } } private void initPluggableTransport(Properties props) { String type = props.getProperty("type"); if (type != null) { PtType proxyPtType = PtType.valueOf(type.toUpperCase()); pt = PluggableTransports.newTransport( proxyPtType, props); LOGGER.info("Using pluggable transport of type {} ", proxyPtType); } } public void start() { LOGGER.info(String .format("Starting Give proxy with the following settings ...\n" + "Host: %1$s\n" + "HTTP port: %2$s\n" + "HTTPS port: %3$s\n" + "UDT port: %4$s\n" + "Keystore path: %5$s\n" + "Auth token: %6$s\n" + "Instance Id: %7$s\n", host, httpPort, httpsPort, udtPort, keyStorePath, expectedAuthToken, instanceId)); startTcp(); if (udtPort != null) { startUdt(); } if (instanceId != null) { startStats(); } } protected void initializeCliOptions() { //@formatter:off addOption(new Option(OPT_HOST, true, "(Required) The proxy's public hostname or ip address"), true); addOption(new Option(OPT_HTTP_PORT, true, "HTTP listen port. Defaults to 80."), false); addOption(new Option(OPT_HTTPS_PORT, true, "HTTPS listen port. Defaults to 443."), false); addOption(new Option(OPT_UDT_PORT, true, "UDT listen port. If not specified, proxy does not listen for UDT connections."), false); addOption(new Option(OPT_KEYSTORE, true, "Path to keystore containing proxy's cert. Defaults to ../too-many-secrets/littleproxy_keystore.jks"), false); addOption(new Option(OPT_AUTHTOKEN, true, "Auth token that this proxy requires from its clients. Defaults to '534#^#$523590)'."), false); addOption(new Option(OPT_INSTANCE_ID, true, "The instanceid. 
If specified, stats will be reported under this instance id. Otherwise, stats will not be reported."), false); options.addOption(OptionBuilder .withLongOpt(OPT_PT) .withArgName("property=value") .hasArgs(2) .withValueSeparator() .withDescription("(Optional) Specify pluggable transport properties") .create()); //@formatter:on } private void startTcp() { LOGGER.info("Starting Plain Text Give proxy at TCP port {}", httpPort); DefaultHttpProxyServer.bootstrap() .withName("Give-PlainText") .withPort(httpPort) .withAllowLocalOnly(false) .withListenOnAllAddresses(true) // Use a filter to respond with 404 to http requests .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, host, httpPort, TransportProtocol.TCP, expectedAuthToken); } }) .plusActivityTracker(activityTracker) .start(); int serverPort = httpsPort; boolean allowLocalOnly = false; boolean encryptionRequired = true; if (pt != null) { // When using a pluggable transport, the transport will use the // configured port and the server will use some random free port // that only allows local connections serverPort = LanternUtils.findFreePort(); allowLocalOnly = true; encryptionRequired = !pt.suppliesEncryption(); } LOGGER.info( "Starting TLS Give proxy at TCP port {}", httpsPort); HttpProxyServerBootstrap bootstrap = DefaultHttpProxyServer.bootstrap() .withName("Give-Encrypted") .withPort(serverPort) .withAllowLocalOnly(allowLocalOnly) .withListenOnAllAddresses(true) .withAuthenticateSslClients(false) // Use a filter to deny requests other than those contains the // right auth token .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, host, httpsPort, TransportProtocol.TCP, expectedAuthToken); } }) .plusActivityTracker(activityTracker); if (encryptionRequired) { bootstrap.withSslEngineSource( new SimpleSslEngineSource(keyStorePath)); } server = bootstrap.start(); if (pt != null) { LOGGER.info("Starting PluggableTransport"); InetSocketAddress giveModeAddress = server.getListenAddress(); pt.startServer(httpsPort, giveModeAddress); } } private void startUdt() { LOGGER.info("Starting Give proxy at UDT port {}", udtPort); server.clone() .withName("Give-UDT") .withPort(udtPort) .withTransportProtocol(TransportProtocol.UDT) // Use a filter to deny requests other than those contains the // right auth token .withFiltersSource(new HttpFiltersSourceAdapter() { @Override public HttpFilters filterRequest( HttpRequest originalRequest, ChannelHandlerContext ctx) { return new GiveModeHttpFilters(originalRequest, ctx, host, udtPort, TransportProtocol.UDT, expectedAuthToken); } }) .start(); } private void startStats() { LOGGER.info( "Starting to report stats to statshub under instanceid: {}", instanceId); statsScheduler.scheduleAtFixedRate( postStats, 10, StatsManager.FALLBACK_POST_INTERVAL, TimeUnit.SECONDS); statsScheduler.scheduleAtFixedRate( postSiteStats, StatsManager.SITES_POST_INTERVAL, StatsManager.SITES_POST_INTERVAL, TimeUnit.SECONDS); } private Runnable postStats = new Runnable() { @Override public void run() { try { Stats instanceStats = stats.toInstanceStats(); StatsManager.addSystemStats(instanceStats); statshub.postInstanceStats( instanceId, null, StatsManager.UNKNOWN_COUNTRY, true, instanceStats); if (LOGGER.isDebugEnabled()) { 
LOGGER.debug("Posted stats:\n\n{}\n", JsonUtils.jsonify(instanceStats)); } } catch (Exception e) { LOGGER.warn("Unable to post stats to statshub: {}", e.getMessage(), e); } } }; private Runnable postSiteStats = new Runnable() { @Override public void run() { LOGGER.info("Reporting host requests to statshub"); Map<String, Map<String, Long>> hostRequestsByCountry = activityTracker.pollHostRequestsByCountry(); try { String statName = Stats.Counters.requestsToHost.name(); for (Map.Entry<String, Map<String, Long>> hostRequestsForCountry : hostRequestsByCountry .entrySet()) { String country = hostRequestsForCountry.getKey(); if (country == null || "".equals(country)) { country = Stats.UNKNOWN_COUNTRY; } String countryStatName = statName + "_" + country; for (Map.Entry<String, Long> hostRequests : hostRequestsForCountry .getValue().entrySet()) { String host = hostRequests.getKey(); long requests = hostRequests.getValue(); String id = "host_" + host; Stats stats = new Stats(); stats.getIncrements().put(statName, requests); stats.getIncrements().put(countryStatName, requests); Map<String, String> dims = new HashMap<String, String>(); dims.put("host", host); statshub.postStats(id, null, country, false, stats, dims); } } } catch (Exception e) { LOGGER.warn("Unable to post site stats to statshub: {}", e.getMessage(), e); } } }; }
message: #1571 Made country suffix lowercase
old_file: src/main/java/org/lantern/simple/Give.java
subject: #1571 Made country suffix lowercase

Row 3:
lang: Java
license: apache-2.0
commit: 6cdda1b2fbfbaf6497590de20c378bfa67541dfb
returncode: 0
repos: allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
new_contents:
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.editor.impl; import com.intellij.injected.editor.EditorWindow; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.AnActionListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.EditorLastActionTracker; import com.intellij.openapi.editor.event.EditorFactoryEvent; import com.intellij.openapi.editor.event.EditorFactoryListener; import com.intellij.openapi.editor.event.EditorMouseEvent; import com.intellij.openapi.editor.event.EditorMouseListener; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public final class EditorLastActionTrackerImpl implements AnActionListener, EditorMouseListener, Disposable, EditorLastActionTracker { private String myLastActionId; private Editor myCurrentEditor; private Editor myLastEditor; EditorLastActionTrackerImpl() { EditorFactory editorFactory = EditorFactory.getInstance(); // to prevent leaks editorFactory.addEditorFactoryListener(new EditorFactoryListener() { @Override public void editorReleased(@NotNull EditorFactoryEvent event) { EditorImpl killedEditor = (EditorImpl)event.getEditor(); if (is(myCurrentEditor, killedEditor)) { myCurrentEditor = null; } if (is(myLastEditor, killedEditor)) { myLastEditor = null; } } }, this); ApplicationManager.getApplication().getMessageBus().connect(this).subscribe(TOPIC, this); editorFactory.getEventMulticaster().addEditorMouseListener(this, this); } private static boolean is(Editor currentEditor, EditorImpl killedEditor) { return currentEditor == killedEditor || currentEditor instanceof EditorWindow && ((EditorWindow)currentEditor).getDelegate() == killedEditor; } @Override public void dispose() { } @Override @Nullable public String getLastActionId() { return myLastActionId; } @Override public void beforeActionPerformed(@NotNull AnAction action, @NotNull DataContext dataContext, @NotNull AnActionEvent event) { myCurrentEditor = CommonDataKeys.EDITOR.getData(dataContext); if (myCurrentEditor != myLastEditor) { resetLastAction(); } } @Override public void afterActionPerformed(@NotNull AnAction action, @NotNull DataContext dataContext, @NotNull AnActionEvent event) { myLastActionId = getActionId(action); myLastEditor = myCurrentEditor; myCurrentEditor = null; } @Override public void beforeEditorTyping(char c, @NotNull DataContext dataContext) { resetLastAction(); } @Override public void mousePressed(@NotNull EditorMouseEvent e) { resetLastAction(); } @Override public void mouseClicked(@NotNull EditorMouseEvent e) { resetLastAction(); } @Override public void mouseReleased(@NotNull EditorMouseEvent e) { resetLastAction(); } private static String getActionId(AnAction action) { return action instanceof ActionStub ? ((ActionStub)action).getId() : ActionManager.getInstance().getId(action); } private void resetLastAction() { myLastActionId = null; myLastEditor = null; } }
new_file: platform/platform-impl/src/com/intellij/openapi/editor/impl/EditorLastActionTrackerImpl.java
old_contents:
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.editor.impl; import com.intellij.injected.editor.EditorWindow; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.AnActionListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.BaseComponent; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.EditorLastActionTracker; import com.intellij.openapi.editor.event.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class EditorLastActionTrackerImpl implements AnActionListener, EditorMouseListener, Disposable, EditorLastActionTracker, BaseComponent { private final ActionManager myActionManager; private final EditorEventMulticaster myEditorEventMulticaster; private String myLastActionId; private Editor myCurrentEditor; private Editor myLastEditor; EditorLastActionTrackerImpl(ActionManager actionManager, EditorFactory editorFactory) { myActionManager = actionManager; myEditorEventMulticaster = editorFactory.getEventMulticaster(); // to prevent leaks editorFactory.addEditorFactoryListener(new EditorFactoryListener() { @Override public void editorReleased(@NotNull EditorFactoryEvent event) { EditorImpl killedEditor = (EditorImpl)event.getEditor(); if (is(myCurrentEditor, killedEditor)) { myCurrentEditor = null; } if (is(myLastEditor, killedEditor)) { myLastEditor = null; } } }, this); } private static boolean is(Editor currentEditor, EditorImpl killedEditor) { return currentEditor == killedEditor || currentEditor instanceof EditorWindow && ((EditorWindow)currentEditor).getDelegate() == killedEditor; } @Override public void initComponent() { ApplicationManager.getApplication().getMessageBus().connect(this).subscribe(TOPIC, this); myEditorEventMulticaster.addEditorMouseListener(this, this); } @Override public void dispose() { } @Override @Nullable public String getLastActionId() { return myLastActionId; } @Override public void beforeActionPerformed(@NotNull AnAction action, @NotNull DataContext dataContext, @NotNull AnActionEvent event) { myCurrentEditor = CommonDataKeys.EDITOR.getData(dataContext); if (myCurrentEditor != myLastEditor) { resetLastAction(); } } @Override public void afterActionPerformed(@NotNull AnAction action, @NotNull DataContext dataContext, @NotNull AnActionEvent event) { myLastActionId = getActionId(action); myLastEditor = myCurrentEditor; myCurrentEditor = null; } @Override public void beforeEditorTyping(char c, @NotNull DataContext dataContext) { resetLastAction(); } @Override public void mousePressed(@NotNull EditorMouseEvent e) { resetLastAction(); } @Override public void mouseClicked(@NotNull EditorMouseEvent e) { resetLastAction(); } @Override public void mouseReleased(@NotNull EditorMouseEvent e) { resetLastAction(); } private String getActionId(AnAction action) { return action instanceof ActionStub ? ((ActionStub)action).getId() : myActionManager.getId(action); } private void resetLastAction() { myLastActionId = null; myLastEditor = null; } }
EditorLastActionTrackerImpl - BaseComponent not required
platform/platform-impl/src/com/intellij/openapi/editor/impl/EditorLastActionTrackerImpl.java
EditorLastActionTrackerImpl - BaseComponent not required
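Editor's note: the refactoring this message describes replaces the BaseComponent lifecycle with plain constructor-time registration. A minimal before/after sketch of the pattern, using only names that appear in the two file versions above (listener bodies elided):

  // Before: registration deferred to the BaseComponent lifecycle hook.
  @Override
  public void initComponent() {
    ApplicationManager.getApplication().getMessageBus().connect(this).subscribe(TOPIC, this);
    myEditorEventMulticaster.addEditorMouseListener(this, this);
  }

  // After: the same registrations happen eagerly in the constructor,
  // so the class no longer needs to implement BaseComponent at all.
  EditorLastActionTrackerImpl() {
    EditorFactory editorFactory = EditorFactory.getInstance();
    ApplicationManager.getApplication().getMessageBus().connect(this).subscribe(TOPIC, this);
    editorFactory.getEventMulticaster().addEditorMouseListener(this, this);
  }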
Java
apache-2.0
b838f6160927e1ec2fef1db2bc207cce9b5942ef
0
chtyim/cdap,anthcp/cdap,hsaputra/cdap,hsaputra/cdap,mpouttuclarke/cdap,anthcp/cdap,caskdata/cdap,hsaputra/cdap,anthcp/cdap,caskdata/cdap,anthcp/cdap,hsaputra/cdap,mpouttuclarke/cdap,chtyim/cdap,caskdata/cdap,chtyim/cdap,mpouttuclarke/cdap,chtyim/cdap,mpouttuclarke/cdap,anthcp/cdap,hsaputra/cdap,chtyim/cdap,mpouttuclarke/cdap,caskdata/cdap,caskdata/cdap,chtyim/cdap,caskdata/cdap
/*
 * Copyright 2012-2013 Continuuity,Inc. All Rights Reserved.
 */

package com.continuuity.passport.http.handlers;

import com.continuuity.passport.core.exceptions.AccountAlreadyExistsException;
import com.continuuity.passport.core.exceptions.AccountNotFoundException;
import com.continuuity.passport.core.exceptions.VPCNotFoundException;
import com.continuuity.passport.core.security.UsernamePasswordApiKeyToken;
import com.continuuity.passport.core.service.AuthenticatorService;
import com.continuuity.passport.core.service.DataManagementService;
import com.continuuity.passport.core.status.AuthenticationStatus;
import com.continuuity.passport.core.utils.PasswordUtils;
import com.continuuity.passport.meta.Account;
import com.continuuity.passport.meta.VPC;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
import com.google.inject.Inject;
import com.google.inject.Singleton;

import javax.ws.rs.*;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Annotations for endpoints, method types and data types for handling Http requests
 * Note: Jersey has a limitation of not allowing multiple resource handlers share the same path.
 * As a result we are needing to have all the code in a single file. This will be potentially
 * huge. Need to find a work-around.
 */
@Path("/passport/v1/account/")
@Singleton
public class AccountHandler extends PassportHandler {

  private final DataManagementService dataManagementService;
  private final AuthenticatorService authenticatorService;

  @Inject
  public AccountHandler(DataManagementService dataManagementService,
                        AuthenticatorService authenticatorService) {
    this.dataManagementService = dataManagementService;
    this.authenticatorService = authenticatorService;
  }

  @Path("{id}")
  @GET
  @Produces("application/json")
  public Response getAccountInfo(@PathParam("id") int id) {
    requestReceived();
    Account account = dataManagementService.getAccount(id);
    if (account != null) {
      requestSuccess();
      return Response.ok(account.toString()).build();
    } else {
      requestFailed();
      return Response.status(Response.Status.NOT_FOUND)
        .entity(Utils.getJsonError("Account not found"))
        .build();
    }
  }

  @Path("{id}/password")
  @PUT
  @Produces("application/json")
  @Consumes("application/json")
  public Response changePassword(@PathParam("id") int id, String data) {
    try {
      requestReceived();
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();

      String oldPassword = jsonObject.get("old_password") == null ? null : jsonObject.get("old_password").getAsString();
      String newPassword = jsonObject.get("new_password") == null ? null : jsonObject.get("new_password").getAsString();

      if ((oldPassword == null) || (oldPassword.isEmpty()) || (newPassword == null) || (newPassword.isEmpty())) {
        requestFailed(); // Request failed
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "Must pass in old_password and new_password"))
          .build();
      }

      dataManagementService.changePassword(id, oldPassword, newPassword);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account account = dataManagementService.getAccount(id);
      if (account != null) {
        requestSuccess();
        return Response.ok(account.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get updated account"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Download confirmation failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("{id}/downloaded")
  @PUT
  @Produces("application/json")
  public Response confirmDownload(@PathParam("id") int id) {
    requestReceived();
    try {
      dataManagementService.confirmDownload(id);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account account = dataManagementService.getAccount(id);
      if (account != null) {
        requestSuccess();
        return Response.ok(account.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get updated account"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Download confirmation failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("{id}")
  @PUT
  @Produces("application/json")
  @Consumes("application/json")
  public Response updateAccount(@PathParam("id") int id, String data) {
    requestReceived();
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();

      Map<String, Object> updateParams = new HashMap<String, Object>();

      String firstName = jsonObject.get("first_name") == null ? null : jsonObject.get("first_name").getAsString();
      String lastName = jsonObject.get("last_name") == null ? null : jsonObject.get("last_name").getAsString();
      String company = jsonObject.get("company") == null ? null : jsonObject.get("company").getAsString();

      //TODO: Find a better way to update the map
      if (firstName != null) {
        updateParams.put("first_name", firstName);
      }
      if (lastName != null) {
        updateParams.put("last_name", lastName);
      }
      if (company != null) {
        updateParams.put("company", company);
      }

      dataManagementService.updateAccount(id, updateParams);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account account = dataManagementService.getAccount(id);
      if (account != null) {
        requestSuccess();
        return Response.ok(account.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get updated account"))
          .build();
      }
    } catch (JsonParseException e) {
      requestFailed();
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Json parse exception. %s", e.getMessage())))
        .build();
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Account Update Failed. %s", e.getMessage())))
        .build();
    }
  }

  @POST
  @Produces("application/json")
  @Consumes("application/json")
  public Response createAccount(String data) {
    requestReceived();
    String emailId = null;
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();
      emailId = jsonObject.get("email_id") == null ? null : jsonObject.get("email_id").getAsString();
      if ((emailId == null)) {
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "Email id is missing")).build();
      } else {
        Account account = dataManagementService.registerAccount(new Account("", "", "", emailId));
        requestSuccess();
        return Response.ok(account.toString()).build();
      }
    } catch (AccountAlreadyExistsException e) {
      //If the account already exists - return the existing account so that the caller can take appropriate action
      Account account = dataManagementService.getAccount(emailId);
      requestFailed(); // Request failed
      return Response.status(Response.Status.CONFLICT)
        .entity(Utils.getJsonError("FAILED", account.toString()))
        .build();
    } catch (JsonParseException e) {
      requestFailed();
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Json parse exception. %s", e.getMessage())))
        .build();
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Account Creation Failed. %s", e)))
        .build();
    }
  }

  @Path("{id}/confirmed")
  @PUT
  @Produces("application/json")
  @Consumes("application/json")
  public Response confirmAccount(String data, @PathParam("id") int id) {
    requestReceived();
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();

      String accountPassword = jsonObject.get("password") == null ? null : jsonObject.get("password").getAsString();
      String firstName = jsonObject.get("first_name") == null ? null : jsonObject.get("first_name").getAsString();
      String lastName = jsonObject.get("last_name") == null ? null : jsonObject.get("last_name").getAsString();
      String company = jsonObject.get("company") == null ? null : jsonObject.get("company").getAsString();

      if ((accountPassword == null) || (accountPassword.isEmpty()) ||
          (firstName == null) || (firstName.isEmpty()) ||
          (lastName == null) || (lastName.isEmpty()) ||
          (company == null) || (company.isEmpty())) {
        requestFailed(); // Request failed
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "password, first_name, last_name, company should be passed in")).build();
      } else {
        Account account = new Account(firstName, lastName, company, id);
        dataManagementService.confirmRegistration(account, accountPassword);
        //Contract for the api is to return updated account to avoid a second call from the caller to get the
        // updated account
        Account accountFetched = dataManagementService.getAccount(id);
        if (accountFetched != null) {
          requestSuccess();
          return Response.ok(accountFetched.toString()).build();
        } else {
          requestFailed(); // Request failed
          return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
            .entity(Utils.getJson("FAILED", "Failed to get updated account"))
            .build();
        }
      }
    } catch (JsonParseException e) {
      requestFailed();
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Json parse exception. %s", e.getMessage())))
        .build();
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Account Confirmation Failed. %s", e)))
        .build();
    }
  }

  @Path("{id}/vpc")
  @POST
  @Produces("application/json")
  @Consumes("application/json")
  public Response createVPC(String data, @PathParam("id") int id) {
    requestReceived();
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();
      String vpcName = jsonObject.get("vpc_name") == null ? null : jsonObject.get("vpc_name").getAsString();
      String vpcLabel = jsonObject.get("vpc_label") == null ? null : jsonObject.get("vpc_label").getAsString();

      if ((vpcName != null) && (!vpcName.isEmpty()) && (vpcLabel != null) && (!vpcLabel.isEmpty())) {
        VPC vpc = dataManagementService.addVPC(id, new VPC(vpcName, vpcLabel));
        requestSuccess();
        return Response.ok(vpc.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "VPC creation failed. vpc_name is missing"))
          .build();
      }
    } catch (JsonParseException e) {
      requestFailed();
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Json parse exception. %s", e.getMessage())))
        .build();
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("VPC Creation Failed. %s", e)))
        .build();
    }
  }

  @Path("{id}/vpc")
  @GET
  @Produces("application/json")
  public Response getVPC(@PathParam("id") int id) {
    requestReceived();
    try {
      List<VPC> vpcList = dataManagementService.getVPC(id);
      if (vpcList.isEmpty()) {
        return Response.ok("[]").build();
      } else {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        boolean first = true;
        for (VPC vpc : vpcList) {
          if (first) {
            first = false;
          } else {
            sb.append(",");
          }
          sb.append(vpc.toString());
        }
        sb.append("]");
        requestSuccess();
        return Response.ok(sb.toString()).build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError(String.format("VPC get Failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("{accountId}/vpc/{vpcId}")
  @GET
  @Produces("application/json")
  public Response getSingleVPC(@PathParam("accountId") int accountId, @PathParam("vpcId") int vpcId) {
    requestReceived();
    try {
      VPC vpc = dataManagementService.getVPC(accountId, vpcId);
      if (vpc == null) {
        requestFailed(); // Request failed
        return Response.status(Response.Status.NOT_FOUND)
          .entity(Utils.getJsonError("VPC not found")).build();
      } else {
        requestSuccess();
        return Response.ok(vpc.toString()).build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError(String.format("VPC get Failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("authenticate")
  @POST
  @Produces("application/json")
  @Consumes("application/json")
  public Response authenticate(String data, @HeaderParam("X-Continuuity-ApiKey") String apiKey) {
    //Logic -
    // Either use emailId and password if present for auth
    // if not present use ApiKey
    // If username and password is passed it can't be null
    // Dummy username and password is used if apiKey is passed to enable it to work with shiro
    requestReceived();

    String emailId = UsernamePasswordApiKeyToken.DUMMY_USER;
    String password = UsernamePasswordApiKeyToken.DUMMY_PASSWORD;
    boolean useApiKey = true;

    if (data != null && !data.isEmpty()) {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();
      password = jsonObject.get("password") == null ? null : jsonObject.get("password").getAsString();
      emailId = jsonObject.get("email_id") == null ? null : jsonObject.get("email_id").getAsString();
      useApiKey = false;
    }

    if (emailId == null || emailId.isEmpty() || password == null || password.isEmpty()) {
      requestFailed();
      return Response.status(Response.Status.BAD_REQUEST).entity(
        Utils.getAuthenticatedJson("Bad Request.", "Username and password can't be null"))
        .build();
    }

    UsernamePasswordApiKeyToken token = null;
    if (useApiKey) {
      token = new UsernamePasswordApiKeyToken(UsernamePasswordApiKeyToken.DUMMY_USER,
                                              UsernamePasswordApiKeyToken.DUMMY_PASSWORD, apiKey, true);
    } else {
      String hashed = PasswordUtils.generateHashedPassword(password);
      token = new UsernamePasswordApiKeyToken(emailId, hashed, apiKey, false);
    }

    try {
      AuthenticationStatus status = authenticatorService.authenticate(token);
      if (status.getType().equals(AuthenticationStatus.Type.AUTHENTICATED)) {
        //TODO: Better naming for authenticatedJson?
        requestSuccess();
        return Response.ok(status.getMessage()).build();
      } else {
        requestFailed(); //Failed request
        return Response.status(Response.Status.UNAUTHORIZED).entity(
          Utils.getAuthenticatedJson("Authentication Failed.", "Either user doesn't exist or password doesn't match"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.UNAUTHORIZED).entity(
        Utils.getAuthenticatedJson("Authentication Failed.", e.getMessage())).build();
    }
  }

  @Path("{id}/regenerateApiKey")
  @GET
  @Produces("application/json")
  public Response regenerateApiKey(@PathParam("id") int accountId) {
    try {
      dataManagementService.regenerateApiKey(accountId);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account accountFetched = dataManagementService.getAccount(accountId);
      if (accountFetched != null) {
        requestSuccess();
        return Response.ok(accountFetched.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.NOT_FOUND)
          .entity(Utils.getJson("FAILED", "Failed to get regenerate key. Account not found"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", "Failed to get regenerate key"))
        .build();
    }
  }

  @Path("{id}")
  @DELETE
  @Produces("application/json")
  public Response deleteAccount(@PathParam("id") int id) {
    requestReceived();
    try {
      dataManagementService.deleteAccount(id);
      requestSuccess();
      return Response.ok().entity(Utils.getJsonOK()).build();
    } catch (AccountNotFoundException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.NOT_FOUND)
        .entity(Utils.getJsonError("Account not found"))
        .build();
    } catch (RuntimeException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError("Account delete Failed", e.getMessage()))
        .build();
    }
  }

  @Path("{accountId}/vpc/{vpcId}")
  @DELETE
  @Produces("application/json")
  public Response deleteVPC(@PathParam("accountId") int accountId, @PathParam("vpcId") int vpcId) {
    requestReceived();
    try {
      dataManagementService.deleteVPC(accountId, vpcId);
      requestSuccess();
      return Response.ok().entity(Utils.getJsonOK()).build();
    } catch (VPCNotFoundException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.NOT_FOUND)
        .entity(Utils.getJsonError("VPC not found"))
        .build();
    } catch (RuntimeException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError("VPC delete Failed", e.getMessage()))
        .build();
    }
  }
}
src/main/java/com/continuuity/passport/http/handlers/AccountHandler.java
/*
 * Copyright 2012-2013 Continuuity,Inc. All Rights Reserved.
 */

package com.continuuity.passport.http.handlers;

import com.continuuity.passport.core.exceptions.AccountAlreadyExistsException;
import com.continuuity.passport.core.exceptions.AccountNotFoundException;
import com.continuuity.passport.core.exceptions.VPCNotFoundException;
import com.continuuity.passport.core.security.UsernamePasswordApiKeyToken;
import com.continuuity.passport.core.service.AuthenticatorService;
import com.continuuity.passport.core.service.DataManagementService;
import com.continuuity.passport.core.status.AuthenticationStatus;
import com.continuuity.passport.core.utils.PasswordUtils;
import com.continuuity.passport.meta.Account;
import com.continuuity.passport.meta.VPC;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.inject.Inject;
import com.google.inject.Singleton;

import javax.ws.rs.*;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Annotations for endpoints, method types and data types for handling Http requests
 * Note: Jersey has a limitation of not allowing multiple resource handlers share the same path.
 * As a result we are needing to have all the code in a single file. This will be potentially
 * huge. Need to find a work-around.
 */
@Path("/passport/v1/account/")
@Singleton
public class AccountHandler extends PassportHandler {

  private final DataManagementService dataManagementService;
  private final AuthenticatorService authenticatorService;

  @Inject
  public AccountHandler(DataManagementService dataManagementService,
                        AuthenticatorService authenticatorService) {
    this.dataManagementService = dataManagementService;
    this.authenticatorService = authenticatorService;
  }

  @Path("{id}")
  @GET
  @Produces("application/json")
  public Response getAccountInfo(@PathParam("id") int id) {
    requestReceived();
    Account account = dataManagementService.getAccount(id);
    if (account != null) {
      requestSuccess();
      return Response.ok(account.toString()).build();
    } else {
      requestFailed();
      return Response.status(Response.Status.NOT_FOUND)
        .entity(Utils.getJsonError("Account not found"))
        .build();
    }
  }

  @Path("{id}/password")
  @PUT
  @Produces("application/json")
  @Consumes("application/json")
  public Response changePassword(@PathParam("id") int id, String data) {
    try {
      requestReceived();
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();

      String oldPassword = jsonObject.get("old_password") == null ? null : jsonObject.get("old_password").getAsString();
      String newPassword = jsonObject.get("new_password") == null ? null : jsonObject.get("new_password").getAsString();

      if ((oldPassword == null) || (oldPassword.isEmpty()) || (newPassword == null) || (newPassword.isEmpty())) {
        requestFailed(); // Request failed
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "Must pass in old_password and new_password"))
          .build();
      }

      dataManagementService.changePassword(id, oldPassword, newPassword);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account account = dataManagementService.getAccount(id);
      if (account != null) {
        requestSuccess();
        return Response.ok(account.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get updated account"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Download confirmation failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("{id}/downloaded")
  @PUT
  @Produces("application/json")
  public Response confirmDownload(@PathParam("id") int id) {
    requestReceived();
    try {
      dataManagementService.confirmDownload(id);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account account = dataManagementService.getAccount(id);
      if (account != null) {
        requestSuccess();
        return Response.ok(account.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get updated account"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", String.format("Download confirmation failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("{id}")
  @PUT
  @Produces("application/json")
  @Consumes("application/json")
  public Response updateAccount(@PathParam("id") int id, String data) {
    requestReceived();
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();

      Map<String, Object> updateParams = new HashMap<String, Object>();

      String firstName = jsonObject.get("first_name") == null ? null : jsonObject.get("first_name").getAsString();
      String lastName = jsonObject.get("last_name") == null ? null : jsonObject.get("last_name").getAsString();
      String company = jsonObject.get("company") == null ? null : jsonObject.get("company").getAsString();

      //TODO: Find a better way to update the map
      if (firstName != null) {
        updateParams.put("first_name", firstName);
      }
      if (lastName != null) {
        updateParams.put("last_name", lastName);
      }
      if (company != null) {
        updateParams.put("company", company);
      }

      dataManagementService.updateAccount(id, updateParams);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account account = dataManagementService.getAccount(id);
      if (account != null) {
        requestSuccess();
        return Response.ok(account.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get updated account"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Account Update Failed. %s", e.getMessage())))
        .build();
    }
  }

  @POST
  @Produces("application/json")
  @Consumes("application/json")
  public Response createAccount(String data) {
    requestReceived();
    String emailId = null;
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();
      emailId = jsonObject.get("email_id") == null ? null : jsonObject.get("email_id").getAsString();
      if ((emailId == null)) {
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "Email id is missing")).build();
      } else {
        Account account = dataManagementService.registerAccount(new Account("", "", "", emailId));
        requestSuccess();
        return Response.ok(account.toString()).build();
      }
    } catch (AccountAlreadyExistsException e) {
      //If the account already exists - return the existing account so that the caller can take appropriate action
      Account account = dataManagementService.getAccount(emailId);
      requestFailed(); // Request failed
      return Response.status(Response.Status.CONFLICT)
        .entity(Utils.getJsonError("FAILED", account.toString()))
        .build();
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Account Creation Failed. %s", e)))
        .build();
    }
  }

  @Path("{id}/confirmed")
  @PUT
  @Produces("application/json")
  @Consumes("application/json")
  public Response confirmAccount(String data, @PathParam("id") int id) {
    requestReceived();
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();

      String accountPassword = jsonObject.get("password") == null ? null : jsonObject.get("password").getAsString();
      String firstName = jsonObject.get("first_name") == null ? null : jsonObject.get("first_name").getAsString();
      String lastName = jsonObject.get("last_name") == null ? null : jsonObject.get("last_name").getAsString();
      String company = jsonObject.get("company") == null ? null : jsonObject.get("company").getAsString();

      if ((accountPassword == null) || (accountPassword.isEmpty()) ||
          (firstName == null) || (firstName.isEmpty()) ||
          (lastName == null) || (lastName.isEmpty()) ||
          (company == null) || (company.isEmpty())) {
        requestFailed(); // Request failed
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "password, first_name, last_name, company should be passed in")).build();
      } else {
        Account account = new Account(firstName, lastName, company, id);
        dataManagementService.confirmRegistration(account, accountPassword);
        //Contract for the api is to return updated account to avoid a second call from the caller to get the
        // updated account
        Account accountFetched = dataManagementService.getAccount(id);
        if (accountFetched != null) {
          requestSuccess();
          return Response.ok(accountFetched.toString()).build();
        } else {
          requestFailed(); // Request failed
          return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
            .entity(Utils.getJson("FAILED", "Failed to get updated account"))
            .build();
        }
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("Account Confirmation Failed. %s", e)))
        .build();
    }
  }

  @Path("{id}/vpc")
  @POST
  @Produces("application/json")
  @Consumes("application/json")
  public Response createVPC(String data, @PathParam("id") int id) {
    requestReceived();
    try {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();
      String vpcName = jsonObject.get("vpc_name") == null ? null : jsonObject.get("vpc_name").getAsString();
      String vpcLabel = jsonObject.get("vpc_label") == null ? null : jsonObject.get("vpc_label").getAsString();

      if ((vpcName != null) && (!vpcName.isEmpty()) && (vpcLabel != null) && (!vpcLabel.isEmpty())) {
        VPC vpc = dataManagementService.addVPC(id, new VPC(vpcName, vpcLabel));
        requestSuccess();
        return Response.ok(vpc.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.BAD_REQUEST)
          .entity(Utils.getJson("FAILED", "VPC creation failed. vpc_name is missing"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJson("FAILED", String.format("VPC Creation Failed. %s", e)))
        .build();
    }
  }

  @Path("{id}/vpc")
  @GET
  @Produces("application/json")
  public Response getVPC(@PathParam("id") int id) {
    requestReceived();
    try {
      List<VPC> vpcList = dataManagementService.getVPC(id);
      if (vpcList.isEmpty()) {
        return Response.ok("[]").build();
      } else {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        boolean first = true;
        for (VPC vpc : vpcList) {
          if (first) {
            first = false;
          } else {
            sb.append(",");
          }
          sb.append(vpc.toString());
        }
        sb.append("]");
        requestSuccess();
        return Response.ok(sb.toString()).build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.BAD_REQUEST)
        .entity(Utils.getJsonError(String.format("VPC get Failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("{accountId}/vpc/{vpcId}")
  @GET
  @Produces("application/json")
  public Response getSingleVPC(@PathParam("accountId") int accountId, @PathParam("vpcId") int vpcId) {
    requestReceived();
    try {
      VPC vpc = dataManagementService.getVPC(accountId, vpcId);
      if (vpc == null) {
        requestFailed(); // Request failed
        return Response.status(Response.Status.NOT_FOUND)
          .entity(Utils.getJsonError("VPC not found")).build();
      } else {
        requestSuccess();
        return Response.ok(vpc.toString()).build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError(String.format("VPC get Failed. %s", e.getMessage())))
        .build();
    }
  }

  @Path("authenticate")
  @POST
  @Produces("application/json")
  @Consumes("application/json")
  public Response authenticate(String data, @HeaderParam("X-Continuuity-ApiKey") String apiKey) {
    //Logic -
    // Either use emailId and password if present for auth
    // if not present use ApiKey
    // If username and password is passed it can't be null
    // Dummy username and password is used if apiKey is passed to enable it to work with shiro
    requestReceived();

    String emailId = UsernamePasswordApiKeyToken.DUMMY_USER;
    String password = UsernamePasswordApiKeyToken.DUMMY_PASSWORD;
    boolean useApiKey = true;

    if (data != null && !data.isEmpty()) {
      JsonParser parser = new JsonParser();
      JsonElement element = parser.parse(data);
      JsonObject jsonObject = element.getAsJsonObject();
      password = jsonObject.get("password") == null ? null : jsonObject.get("password").getAsString();
      emailId = jsonObject.get("email_id") == null ? null : jsonObject.get("email_id").getAsString();
      useApiKey = false;
    }

    if (emailId == null || emailId.isEmpty() || password == null || password.isEmpty()) {
      requestFailed();
      return Response.status(Response.Status.BAD_REQUEST).entity(
        Utils.getAuthenticatedJson("Bad Request.", "Username and password can't be null"))
        .build();
    }

    UsernamePasswordApiKeyToken token = null;
    if (useApiKey) {
      token = new UsernamePasswordApiKeyToken(UsernamePasswordApiKeyToken.DUMMY_USER,
                                              UsernamePasswordApiKeyToken.DUMMY_PASSWORD, apiKey, true);
    } else {
      String hashed = PasswordUtils.generateHashedPassword(password);
      token = new UsernamePasswordApiKeyToken(emailId, hashed, apiKey, false);
    }

    try {
      AuthenticationStatus status = authenticatorService.authenticate(token);
      if (status.getType().equals(AuthenticationStatus.Type.AUTHENTICATED)) {
        //TODO: Better naming for authenticatedJson?
        requestSuccess();
        return Response.ok(status.getMessage()).build();
      } else {
        requestFailed(); //Failed request
        return Response.status(Response.Status.UNAUTHORIZED).entity(
          Utils.getAuthenticatedJson("Authentication Failed.", "Either user doesn't exist or password doesn't match"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.UNAUTHORIZED).entity(
        Utils.getAuthenticatedJson("Authentication Failed.", e.getMessage())).build();
    }
  }

  @Path("{id}/regenerateApiKey")
  @GET
  @Produces("application/json")
  public Response regenerateApiKey(@PathParam("id") int accountId) {
    try {
      dataManagementService.regenerateApiKey(accountId);
      //Contract for the api is to return updated account to avoid a second call from the caller to get the
      // updated account
      Account accountFetched = dataManagementService.getAccount(accountId);
      if (accountFetched != null) {
        requestSuccess();
        return Response.ok(accountFetched.toString()).build();
      } else {
        requestFailed(); // Request failed
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity(Utils.getJson("FAILED", "Failed to get regenerate key"))
          .build();
      }
    } catch (Exception e) {
      requestFailed(); // Request failed
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJson("FAILED", "Failed to get regenerate key"))
        .build();
    }
  }

  @Path("{id}")
  @DELETE
  @Produces("application/json")
  public Response deleteAccount(@PathParam("id") int id) {
    requestReceived();
    try {
      dataManagementService.deleteAccount(id);
      requestSuccess();
      return Response.ok().entity(Utils.getJsonOK()).build();
    } catch (AccountNotFoundException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.NOT_FOUND)
        .entity(Utils.getJsonError("Account not found"))
        .build();
    } catch (RuntimeException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError("Account delete Failed", e.getMessage()))
        .build();
    }
  }

  @Path("{accountId}/vpc/{vpcId}")
  @DELETE
  @Produces("application/json")
  public Response deleteVPC(@PathParam("accountId") int accountId, @PathParam("vpcId") int vpcId) {
    requestReceived();
    try {
      dataManagementService.deleteVPC(accountId, vpcId);
      requestSuccess();
      return Response.ok().entity(Utils.getJsonOK()).build();
    } catch (VPCNotFoundException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.NOT_FOUND)
        .entity(Utils.getJsonError("VPC not found"))
        .build();
    } catch (RuntimeException e) {
      requestFailed(); //Failed request
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(Utils.getJsonError("VPC delete Failed", e.getMessage()))
        .build();
    }
  }
}
Enhance exception handling. Add JsonParse exception in account handler. Return appropriate status codes
src/main/java/com/continuuity/passport/http/handlers/AccountHandler.java
Enhance exception handling. Add JsonParse exception in account handler. Return appropriate status codes
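Editor's note: the key change in this commit is splitting the single catch (Exception e) handlers so that malformed JSON maps to 400 Bad Request while unexpected failures map to 500 Internal Server Error. A minimal sketch of the pattern, using the Gson and Utils helpers that appear in the handler above; the "Request failed." message text is illustrative, not from the source, and the snippet assumes it sits in a JAX-RS method returning Response:

  try {
    // Gson throws JsonParseException on unparseable input.
    JsonObject jsonObject = new JsonParser().parse(data).getAsJsonObject();
    // ... handle the request ...
  } catch (JsonParseException e) {
    // The client sent bad JSON: a client error, so 400 rather than 500.
    return Response.status(Response.Status.BAD_REQUEST)
      .entity(Utils.getJson("FAILED", String.format("Json parse exception. %s", e.getMessage())))
      .build();
  } catch (Exception e) {
    // Anything else is treated as a server-side failure.
    // JsonParseException must be caught first; Java requires the more
    // specific catch clause before the broader Exception.
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
      .entity(Utils.getJson("FAILED", String.format("Request failed. %s", e.getMessage())))
      .build();
  }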
Java
apache-2.0
8178ec4d125beeca316dfbc2f572108b51aab091
0
alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,DwayneJengSage/BridgePF,alxdarksage/BridgePF,DwayneJengSage/BridgePF
package org.sagebionetworks.bridge.models.accounts;

import static org.junit.Assert.*;

import org.junit.Test;
import org.sagebionetworks.bridge.config.Environment;
import org.sagebionetworks.bridge.dao.ParticipantOption.SharingScope;
import org.sagebionetworks.bridge.json.BridgeObjectMapper;
import org.sagebionetworks.bridge.models.studies.StudyIdentifierImpl;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.Sets;

public class UserSessionInfoTest {

    @Test
    public void userSessionInfoSerializesCorrectly() throws Exception {
        User user = new User();
        user.setConsent(false);
        user.setEmail("[email protected]");
        user.setFirstName("first name");
        user.setLastName("last name");
        user.setHealthCode("healthCode");
        user.setId("user-identifier");
        user.setRoles(Sets.newHashSet("test_role"));
        user.setSharingScope(SharingScope.ALL_QUALIFIED_RESEARCHERS);
        user.setSignedMostRecentConsent(false);
        user.setStudyKey("study-identifier");
        user.setUsername("username");

        UserSession session = new UserSession();
        session.setAuthenticated(true);
        session.setEnvironment(Environment.UAT);
        session.setInternalSessionToken("internal");
        session.setSessionToken("external");
        session.setStudyIdentifier(new StudyIdentifierImpl("study-identifier"));
        session.setUser(user);

        UserSessionInfo info = new UserSessionInfo(session);

        String json = BridgeObjectMapper.get().writeValueAsString(info);
        JsonNode node = BridgeObjectMapper.get().readTree(json);

        assertEquals(session.isAuthenticated(), node.get("authenticated").asBoolean());
        assertEquals(user.hasSignedMostRecentConsent(), node.get("signedMostRecentConsent").asBoolean());
        assertEquals(user.doesConsent(), node.get("consented").asBoolean());
        assertEquals(user.getSharingScope().name(), node.get("sharingScope").asText().toUpperCase());
        assertEquals(session.getSessionToken(), node.get("sessionToken").asText());
        assertEquals(user.getUsername(), node.get("username").asText());
        assertEquals("staging", node.get("environment").asText());
        assertEquals("UserSessionInfo", node.get("type").asText());
        // ... and no things that shouldn't be there
        assertEquals(9, node.size());
    }
}
test/org/sagebionetworks/bridge/models/accounts/UserSessionInfoTest.java
package org.sagebionetworks.bridge.models.accounts;

import static org.junit.Assert.*;

import java.util.Iterator;

import org.junit.Test;
import org.sagebionetworks.bridge.config.Environment;
import org.sagebionetworks.bridge.dao.ParticipantOption.SharingScope;
import org.sagebionetworks.bridge.json.BridgeObjectMapper;
import org.sagebionetworks.bridge.models.studies.StudyIdentifierImpl;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.Sets;

public class UserSessionInfoTest {

    @Test
    public void userSessionInfoSerializesCorrectly() throws Exception {
        User user = new User();
        user.setConsent(false);
        user.setEmail("[email protected]");
        user.setFirstName("first name");
        user.setLastName("last name");
        user.setHealthCode("healthCode");
        user.setId("user-identifier");
        user.setRoles(Sets.newHashSet("test_role"));
        user.setSharingScope(SharingScope.ALL_QUALIFIED_RESEARCHERS);
        user.setSignedMostRecentConsent(false);
        user.setStudyKey("study-identifier");
        user.setUsername("username");

        UserSession session = new UserSession();
        session.setAuthenticated(true);
        session.setEnvironment(Environment.UAT);
        session.setInternalSessionToken("internal");
        session.setSessionToken("external");
        session.setStudyIdentifier(new StudyIdentifierImpl("study-identifier"));
        session.setUser(user);

        UserSessionInfo info = new UserSessionInfo(session);

        String json = BridgeObjectMapper.get().writeValueAsString(info);
        JsonNode node = BridgeObjectMapper.get().readTree(json);

        assertEquals(session.isAuthenticated(), node.get("authenticated").asBoolean());
        assertEquals(user.hasSignedMostRecentConsent(), node.get("signedMostRecentConsent").asBoolean());
        assertEquals(user.doesConsent(), node.get("consented").asBoolean());
        assertEquals(user.getSharingScope().name(), node.get("sharingScope").asText().toUpperCase());
        assertEquals(session.getSessionToken(), node.get("sessionToken").asText());
        assertEquals(user.getUsername(), node.get("username").asText());
        assertEquals("staging", node.get("environment").asText());
        assertEquals("UserSessionInfo", node.get("type").asText());
        // ... and no things that shouldn't be there
        assertEquals(9, node.size());
    }
}
Remove unused import
test/org/sagebionetworks/bridge/models/accounts/UserSessionInfoTest.java
Remove unused import
Java
apache-2.0
ef6c619705fd3f3c822ffdbe3eaf59015e4c8381
0
nikeshmhr/unitime,sktoo/timetabling-system-,UniTime/unitime,UniTime/unitime,rafati/unitime,rafati/unitime,rafati/unitime,UniTime/unitime,maciej-zygmunt/unitime,sktoo/timetabling-system-,maciej-zygmunt/unitime,sktoo/timetabling-system-,nikeshmhr/unitime,zuzanamullerova/unitime,nikeshmhr/unitime,zuzanamullerova/unitime,maciej-zygmunt/unitime,zuzanamullerova/unitime
package org.unitime.timetable.reports.exam;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.TreeSet;
import java.util.Vector;

import net.sf.cpsolver.ifs.util.ToolBox;

import org.unitime.timetable.ApplicationProperties;
import org.unitime.timetable.model.BuildingPref;
import org.unitime.timetable.model.ClassEvent;
import org.unitime.timetable.model.Class_;
import org.unitime.timetable.model.CourseOffering;
import org.unitime.timetable.model.CourseOfferingReservation;
import org.unitime.timetable.model.Department;
import org.unitime.timetable.model.DistributionObject;
import org.unitime.timetable.model.DistributionPref;
import org.unitime.timetable.model.Exam;
import org.unitime.timetable.model.ExamOwner;
import org.unitime.timetable.model.ExamPeriodPref;
import org.unitime.timetable.model.InstrOfferingConfig;
import org.unitime.timetable.model.InstructionalOffering;
import org.unitime.timetable.model.ItypeDesc;
import org.unitime.timetable.model.Meeting;
import org.unitime.timetable.model.Preference;
import org.unitime.timetable.model.PreferenceLevel;
import org.unitime.timetable.model.RoomFeaturePref;
import org.unitime.timetable.model.RoomGroupPref;
import org.unitime.timetable.model.RoomPref;
import org.unitime.timetable.model.SchedulingSubpart;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.SubjectArea;
import org.unitime.timetable.model.comparators.CourseOfferingComparator;
import org.unitime.timetable.model.comparators.SchedulingSubpartComparator;
import org.unitime.timetable.model.dao.SessionDAO;
import org.unitime.timetable.model.dao._RootDAO;
import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo;
import org.unitime.timetable.solver.exam.ui.ExamRoomInfo;
import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamSectionInfo;

import com.lowagie.text.DocumentException;

public class ExamVerificationReport extends PdfLegacyExamReport {
    private CourseOffering iCourseOffering = null;
    private boolean iSkipHoles = true;
    private boolean iHasAssignment = false;

    public ExamVerificationReport(int mode, File file, Session session, int examType, SubjectArea subjectArea, Collection<ExamAssignmentInfo> exams) throws IOException, DocumentException {
        super(mode, file, "EXAMINATION VERIFICATION REPORT", session, examType, subjectArea, exams);
        for (ExamAssignmentInfo exam : exams) {
            if (exam.getPeriod()!=null) { iHasAssignment = true; break; }
        }
    }

    public TreeSet<ExamAssignmentInfo> getExams(CourseOffering course) {
        TreeSet<ExamAssignmentInfo> exams = new TreeSet();
        for (ExamAssignmentInfo exam : getExams()) {
            for (ExamSectionInfo section : exam.getSections()) {
                if (section.getOwnerType()==ExamOwner.sOwnerTypeCourse && section.getOwnerId().equals(course.getUniqueId())) exams.add(exam);
                if (section.getOwnerType()==ExamOwner.sOwnerTypeOffering && section.getOwnerId().equals(course.getInstructionalOffering().getUniqueId())) exams.add(exam);
            }
        }
        return exams;
    }

    public TreeSet<ExamAssignmentInfo> getExams(Class_ clazz) {
        TreeSet<ExamAssignmentInfo> exams = new TreeSet();
        for (ExamAssignmentInfo exam : getExams()) {
            for (ExamSectionInfo section : exam.getSections()) {
                if (section.getOwnerType()==ExamOwner.sOwnerTypeClass && section.getOwnerId().equals(clazz.getUniqueId())) exams.add(exam);
                if (section.getOwnerType()==ExamOwner.sOwnerTypeConfig && section.getOwnerId().equals(clazz.getSchedulingSubpart().getInstrOfferingConfig().getUniqueId())) exams.add(exam);
            }
        }
        return exams;
    }

    public String genName(String pattern, Class_ clazz) {
        String name = pattern;
        int idx = -1;
        while (name.indexOf('%',idx+1)>=0) {
            idx = name.indexOf('%',idx);
            char code = name.charAt(idx+1);
            String name4code = genName(code, clazz);
            name = name.substring(0,idx)+(name4code==null?"":name4code)+name.substring(idx+2);
        }
        return name;
    }

    protected String genName(char code, Class_ clazz) {
        switch (code) {
            case '_' : return " ";
            case 's' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getSubjectArea().getSubjectAreaAbbreviation();
            case 'c' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getCourseNbr();
            case 'i' : return clazz.getSchedulingSubpart().getItypeDesc().trim();
            case 'n' : return clazz.getSectionNumberString();
            case 'x' : return clazz.getSchedulingSubpart().getInstrOfferingConfig().getName();
            case 'D' : return clazz.getControllingDept().getDeptCode();
            case 'd' :
                Department d = clazz.getControllingDept();
                return (d.getAbbreviation()==null || d.getAbbreviation().length()==0?d.getDeptCode():d.getAbbreviation());
            case 'a' : return clazz.getClassSuffix();
            case 'y' : return clazz.getSchedulingSubpart().getSchedulingSubpartSuffix();
            case 'e' : return clazz.getExternalUniqueId();
            case 'f' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getExternalUniqueId();
            case 'o' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getInstructionalOffering().getExternalUniqueId();
            case 't' : return "";
            case 'I' : return clazz.getSchedulingSubpart().getItype().getItype().toString();
            case 'p' :
                ItypeDesc itype = clazz.getSchedulingSubpart().getItype();
                while (itype.getParent()!=null) itype = itype.getParent();
                return itype.getAbbv();
            case 'P' :
                itype = clazz.getSchedulingSubpart().getItype();
                while (itype.getParent()!=null) itype = itype.getParent();
                return itype.getItype().toString();
        }
        return "";
    }

    public String getMeetWith(Class_ clazz, Vector<Class_> exclude) {
        TreeSet<Class_> classes = new TreeSet(new Comparator<Class_>() {
            public int compare(Class_ c1, Class_ c2) {
                if (c1.getSchedulingSubpart().equals(c2.getSchedulingSubpart())) {
                    String sx1 = (iUseClassSuffix?c1.getClassSuffix():c1.getSectionNumberString());
                    String sx2 = (iUseClassSuffix?c2.getClassSuffix():c2.getSectionNumberString());
                    if (sx1!=null && sx2!=null) return sx1.compareTo(sx2);
                    return c1.getSectionNumber().compareTo(c2.getSectionNumber());
                }
                return new SchedulingSubpartComparator().compare(c1.getSchedulingSubpart(), c2.getSchedulingSubpart());
            }
        });
        for (Iterator i=clazz.getDistributionObjects().iterator();i.hasNext();) {
            DistributionObject dObj = (DistributionObject)i.next();
            if (!"MEET_WITH".equals(dObj.getDistributionPref().getDistributionType().getReference())) continue;
            for (Iterator j=dObj.getDistributionPref().getDistributionObjects().iterator();j.hasNext();) {
                DistributionObject xObj = (DistributionObject)j.next();
                if (exclude!=null && exclude.contains(xObj.getPrefGroup())) continue;
                if (xObj.getPrefGroup() instanceof Class_) {
                    classes.add((Class_)xObj.getPrefGroup());
                } else {
                    classes.addAll(((SchedulingSubpart)xObj.getPrefGroup()).getClasses());
                }
            }
        }
        if (classes.isEmpty()) return "";
        Class_ prev = clazz;
        String ret = "";
        for (Class_ c : classes) {
            if (ret.length()==0)
                ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.Class"),c);
            else if (prev.getSchedulingSubpart().getControllingCourseOffering().getSubjectArea().equals(c.getSchedulingSubpart().getControllingCourseOffering().getSubjectArea())) {
                //same subject area
                if (prev.getSchedulingSubpart().getControllingCourseOffering().equals(c.getSchedulingSubpart().getControllingCourseOffering())) {
                    //same course number
                    if (prev.getSchedulingSubpart().equals(c.getSchedulingSubpart()))
                        ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.sameSubpart.Class"),c);
                    else
                        ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.sameCourse.Class"),c);
                } else {
                    //different course number
                    ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.sameSubject.Class"),c);
                }
            } else {
                ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.diffSubject.separator"),prev);
                ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.Class"),c);
            }
            prev = c;
        }
        return ret;
    }

    private String formatSection(Class_ clazz) {
        return (!iUseClassSuffix || clazz.getClassSuffix()==null || clazz.getClassSuffix().length()==0?clazz.getSectionNumberString():clazz.getClassSuffix());
    }

    private String formatSection(Vector<Class_> classes) {
        if (classes.isEmpty()) return "";
        if (classes.size()==1) return formatSection(classes.firstElement());
        return formatSection(classes.firstElement())+" - "+formatSection(classes.lastElement());
    }

    private boolean sameExams(TreeSet<ExamAssignmentInfo> x1, TreeSet<ExamAssignmentInfo> x2) {
        if (x1.equals(x2)) return true;
        if (x1.size()!=x2.size()) return false;
        return false;
    }

    public String getMessage(Class_ clazz, boolean hasCourseExam, boolean hasSectionExam) {
        TreeSet<ExamAssignmentInfo> exams = getExams(clazz);
        if (!exams.isEmpty()) return "";
        String message = "** NO EXAM **";
        if (hasCourseExam && !hasSectionExam) message = ""; // Has other exam
        else if (!hasSectionExam && !clazz.getSchedulingSubpart().getItype().isOrganized()) message = "Not organized instructional type";
        else {
            if (clazz.getEvent()==null || clazz.getEvent().getMeetings().isEmpty()) {
                message = "Class not organized";
            } else if (!isFullTerm(clazz.getEvent())) {
                TreeSet meetings = new TreeSet(clazz.getEvent().getMeetings());
                Meeting first = (Meeting)meetings.first();
                Meeting last = (Meeting)meetings.last();
                SimpleDateFormat df = new SimpleDateFormat("MM/dd");
                message = "Class not full-term ("+df.format(first.getMeetingDate())+(first.getMeetingDate().equals(last.getMeetingDate())?"":" - "+df.format(last.getMeetingDate()))+")";
            }
        }
        return message;
    }

    private void print(Vector<Class_> same, boolean hasCourseExam, boolean hasSectionExam, int minLimit, int maxLimit, int minEnrl, int maxEnrl) throws DocumentException {
        String cmw = getMeetWith(same.firstElement(),same);
        TreeSet<ExamAssignmentInfo> exams = getExams(same.firstElement());
        iPeriodPrinted = false;
        if (exams.isEmpty()) {
            String message = "** NO EXAM **";
            if (hasCourseExam && !hasSectionExam) message = ""; // Has other exam
            else if (!hasSectionExam && !same.firstElement().getSchedulingSubpart().getItype().isOrganized()) message = "Not organized instructional type";
            else {
                ClassEvent classEvent = same.firstElement().getEvent();
                if (classEvent==null || classEvent.getMeetings().isEmpty()) {
                    message = "Class not organized";
                } else if (!isFullTerm(classEvent)) {
                    TreeSet meetings = new TreeSet(classEvent.getMeetings());
                    Meeting first = (Meeting)meetings.first();
                    Meeting last = (Meeting)meetings.last();
                    SimpleDateFormat df = new SimpleDateFormat("MM/dd");
                    message = "Class not full-term ("+df.format(first.getMeetingDate())+(first.getMeetingDate().equals(last.getMeetingDate())?"":" - "+df.format(last.getMeetingDate()))+")";
                }
            }
            String title = same.firstElement().getSchedulePrintNote();
            /* if (title!=null && title.equals(same.firstElement().getSchedulingSubpart().getControllingCourseOffering().getTitle())) title = null; */
            boolean hasTitle = (title!=null && title.trim().length()>0);
            boolean titleSameLine = hasTitle && (" "+title).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length());
            boolean titleSeparateLine = hasTitle && !titleSameLine;
            boolean hasMw = cmw.length()>0;
            boolean mwSameLine = hasMw && !titleSameLine && (" m/w "+cmw).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length());
            boolean mwSeparateLine = hasMw && !mwSameLine;
            if ((titleSeparateLine || mwSeparateLine) && getLineNumber()+1+(titleSeparateLine?0:1)+(mwSeparateLine?1:0)>iNrLines) newPage();
            println(
                lpad(iITypePrinted?"":same.firstElement().getSchedulingSubpart().getItypeDesc(),11)+" "+
                rpad(formatSection(same)+(same.size()>1?" ("+same.size()+" classes)":"")+
                    (titleSameLine?" "+title:"")+(mwSameLine?" m/w "+cmw:""),(iDispLimits?28:48))+" "+
                (iDispLimits?lpad(maxLimit<=0?"":minLimit!=maxLimit?minLimit+"-"+maxLimit:""+minLimit,9)+" "+
                lpad(maxEnrl<=0?"":minEnrl!=maxEnrl?minEnrl+"-"+maxEnrl:""+minEnrl,9)+" ":"")+
                " "+message);
            if (titleSeparateLine)
                println(lpad("",11)+" "+title);
            if (mwSeparateLine)
                println(lpad("",11)+" Meets with "+cmw);
            iITypePrinted = !iNewPage;
        } else for (ExamAssignmentInfo exam : exams) {
            Vector<String> rooms = new Vector();
            Vector<String> times = new Vector();
            if (exam.getPeriod()==null) {
                times.add(rpad(iHasAssignment?" Exam not assigned":" Section exam",26));
                rooms.add(rpad("", 23));
                //if (exam.getMaxRooms()==0) rooms.add(" "+rpad(iNoRoom, 22));
                for (Iterator i=new TreeSet(exam.getExam().getPreferences()).iterator();i.hasNext();) {
                    Preference pref = (Preference)i.next();
                    if (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) {
                        String pf = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" ":"!");
                        if (pref instanceof ExamPeriodPref) {
                            ExamPeriodPref xp = (ExamPeriodPref)pref;
                            times.add(pf+rpad(formatPeriod(xp.getExamPeriod()), 25));
                        } else if (exam.getMaxRooms()>0) {
                            if (pref instanceof RoomPref) {
                                RoomPref rp = (RoomPref)pref;
                                rooms.add(pf+formatRoom(rp.getRoom().getLabel())+" "+
                                        lpad(""+rp.getRoom().getCapacity(),4)+" "+
                                        lpad(""+rp.getRoom().getExamCapacity(),5));
                            } else if (pref instanceof BuildingPref) {
                                BuildingPref bp = (BuildingPref)pref;
                                rooms.add(pf+rpad(bp.getBuilding().getAbbreviation(), 22));
                            } else if (pref instanceof RoomFeaturePref) {
                                RoomFeaturePref fp = (RoomFeaturePref)pref;
                                rooms.add(pf+rpad(fp.getRoomFeature().getLabel(), 22));
                            } else if (pref instanceof RoomGroupPref) {
                                RoomGroupPref gp = (RoomGroupPref)pref;
                                rooms.add(pf+rpad(gp.getRoomGroup().getName(), 22));
                            }
                        }
                    }
                }
                for (Iterator i=exam.getExam().getDistributionObjects().iterator();i.hasNext();) {
                    DistributionObject dObj = (DistributionObject)i.next();
                    DistributionPref pref = dObj.getDistributionPref();
                    if (!PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) && !PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) continue;
                    int line = 0;
                    String name = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" ":"!")+pref.getDistributionType().getAbbreviation();
                    if (name.toUpperCase().startsWith("!SAME ")) name = " Diff"+name.substring(5);
                    for (Iterator j=new TreeSet(pref.getDistributionObjects()).iterator();j.hasNext();) {
                        DistributionObject xObj = (DistributionObject)j.next();
                        if (xObj.equals(dObj)) continue;
                        Exam x = (Exam)xObj.getPrefGroup();
                        for (Iterator k=new TreeSet(x.getOwners()).iterator();k.hasNext();) {
                            ExamOwner own = (ExamOwner)k.next();
                            times.add(rpad(rpad(line>0?"":name,name.length())+" "+own.getLabel(),26));
                            line++;
                        }
                    }
                }
            } else {
                if (exam.getRooms()==null || exam.getRooms().isEmpty()) {
                    rooms.add(" "+rpad(iNoRoom, 22));
                } else for (ExamRoomInfo room : exam.getRooms()) {
                    rooms.add(" "+formatRoom(room.getName())+" "+
                            lpad(""+room.getCapacity(),4)+" "+
                            lpad(""+room.getExamCapacity(),5));
                }
                times.add(" "+rpad(formatPeriod(exam.getPeriod()),25));
            }
            Vector<String> meetsWith = new Vector();
            int cnt = 0;
            int maxCnt = Math.max(4,Math.max(rooms.size(), times.size())-1);
            for (ExamSectionInfo section : exam.getSections()) {
                if (section.getOwnerType()==ExamOwner.sOwnerTypeClass && same.contains(section.getOwner().getOwnerObject())) continue;
                if (section.getOwnerType()==ExamOwner.sOwnerTypeConfig && section.getOwnerId().equals(same.firstElement().getSchedulingSubpart().getInstrOfferingConfig().getUniqueId())) continue;
                if (cnt>=maxCnt) {
                    meetsWith.add(" "+rpad("...",14));
                    break;
                }
                if (iItype)
                    meetsWith.add(" "+rpad(section.getName(),14));
                else
                    meetsWith.add(" "+
                            rpad(section.getSubject(),4)+" "+
                            rpad(section.getCourseNbr(),5)+" "+
                            rpad(section.getSection(),3));
                cnt++;
            }
            int nrLines = Math.max(Math.max(rooms.size(), meetsWith.size()),times.size());
            String title = same.firstElement().getSchedulePrintNote();
            /* if (title!=null && title.equals(same.firstElement().getSchedulingSubpart().getControllingCourseOffering().getTitle())) title = null; */
            boolean hasTitle = !iPeriodPrinted && (title!=null && title.trim().length()>0);
            boolean titleSameLine = hasTitle && (" "+title).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length());
            boolean titleSecondLine = hasTitle && !titleSameLine && nrLines>1 && (" "+title).length()<=(iDispLimits?28:48);
            boolean titleSeparateLine = hasTitle && !titleSameLine && !titleSecondLine;
            boolean hasMw = !iPeriodPrinted && cmw.length()>0;
            boolean mwSameLine = hasMw && !titleSameLine && (" m/w "+cmw).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length());
            boolean mwSecondLine = hasMw && !mwSameLine && !titleSecondLine && nrLines>1 && (" Meets with "+cmw).length()<=(iDispLimits?28:48);
            boolean mwThirdLine = hasMw && !mwSameLine && titleSecondLine && nrLines>2 && (" Meets with "+cmw).length()<=(iDispLimits?28:48);
            boolean mwSeparateLine = hasMw && !mwSameLine && !mwSecondLine && !mwThirdLine;
            if (getLineNumber()+nrLines+(mwSeparateLine?1:0)+(titleSeparateLine?1:0)>iNrLines) newPage();
            for (int idx = 0; idx < nrLines; idx++) {
                String room = (idx<rooms.size()?rooms.elementAt(idx):rpad("",23));
                String mw = (idx<meetsWith.size()?meetsWith.elementAt(idx):"");
                String time = (idx<times.size()?times.elementAt(idx):rpad("",26));
                println(lpad(idx>0 || iITypePrinted?"":same.firstElement().getSchedulingSubpart().getItypeDesc(),11)+" "+
                        rpad(iPeriodPrinted?"":idx>0 ?
                            (idx==1 && mwSecondLine?" Meets with "+cmw:"")+
                            (idx==1 && titleSecondLine?" "+title:"")+
                            (idx==2 && mwThirdLine?" Meets with "+cmw:"")
                            :
                            formatSection(same)+(same.size()>1?" ("+same.size()+" classes)":"")+
                            (titleSameLine?" "+title:"")+
                            (mwSameLine?" m/w "+cmw:"")
                            ,(iDispLimits?28:48))+" "+
                        (iDispLimits?lpad(iPeriodPrinted || idx>0 || maxLimit<=0?"":minLimit!=maxLimit?minLimit+"-"+maxLimit:""+minLimit,9)+" "+
                        lpad(iPeriodPrinted || idx>0 || maxEnrl<=0?"":minEnrl!=maxEnrl?minEnrl+"-"+maxEnrl:""+minEnrl,9)+" ":"")+
                        lpad(idx>0?"":exam.getSeatingType()==Exam.sSeatingTypeExam?"yes":"no",4)+" "+
                        lpad(idx>0?"":String.valueOf(exam.getLength()),3)+time+room+mw
                        );
                if (idx==0 && titleSeparateLine)
                    println(lpad("",11)+" "+title);
                if (idx==0 && mwSeparateLine)
                    println(lpad("",11)+" Meets with "+cmw);
            }
            iITypePrinted = iPeriodPrinted = !iNewPage;
        }
    }

    public void printReport() throws DocumentException {
        System.out.println("Loading courses ...");
        TreeSet<CourseOffering> allCourses = new TreeSet(new Comparator<CourseOffering>() {
            public int compare(CourseOffering co1, CourseOffering co2) {
                int cmp = co1.getSubjectAreaAbbv().compareTo(co2.getSubjectAreaAbbv());
                if (cmp!=0) return cmp;
                cmp = co1.getCourseNbr().compareTo(co2.getCourseNbr());
                if (cmp!=0) return cmp;
                return co1.getUniqueId().compareTo(co2.getUniqueId());
            }
        });
        if (getSubjectArea()!=null)
            allCourses.addAll(new SessionDAO().getSession().
                    createQuery("select co from CourseOffering co where co.subjectArea.uniqueId=:subjectAreaId").
                    setLong("subjectAreaId", getSubjectArea().getUniqueId()).list());
        else
            allCourses.addAll(new SessionDAO().getSession().
                    createQuery("select co from CourseOffering co where co.subjectArea.session.uniqueId=:sessionId").
                    setLong("sessionId", getSession().getUniqueId()).list());
        if (allCourses.isEmpty()) return;
        System.out.println("Printing report ...");
        SubjectArea subject = null;
        setHeader(new String[] {
                "Course Title "+(iDispLimits?" ":" ")+" Alt Len ",
                " InsType Sections "+(iDispLimits?" Limit Enrollmnt":" ")+" Seat ght Date & Time Room Cap ExCap Exam with",
                "----------- ----------------------------"+(iDispLimits?" --------- ---------":"--------------------")+" ---- --- ------------------------- ----------- ---- ----- --------------"});
        printHeader();
        for (CourseOffering co : allCourses) {
            InstructionalOffering io = co.getInstructionalOffering();
            if (!co.isIsControl() && co.getInstructionalOffering().getControllingCourseOffering().getSubjectArea().equals(co.getSubjectArea())) continue;
            if (subject==null) {
                subject = co.getSubjectArea();
                setFooter(subject.getSubjectAreaAbbreviation());
            } else if (!subject.equals(co.getSubjectArea())) {
                subject = co.getSubjectArea();
                newPage();
                setFooter(subject.getSubjectAreaAbbreviation());
            }
            setPageName(co.getCourseName());
            setCont(co.getCourseName());
            TreeSet<CourseOffering> courses = new TreeSet(new CourseOfferingComparator(CourseOfferingComparator.COMPARE_BY_CTRL_CRS));
            courses.addAll(co.getInstructionalOffering().getCourseOfferings());
            boolean hasCourseExam = false;
            for (CourseOffering course : courses) {
                if (!getExams(course).isEmpty()) {
                    hasCourseExam = true;
                    break;
                }
            }
            for (CourseOffering course : courses) {
                int courseLimit = -1;
                InstructionalOffering offering = course.getInstructionalOffering();
                boolean unlimited = false;
                for (Iterator i=offering.getCourseReservations().iterator();i.hasNext();) {
                    CourseOfferingReservation r = (CourseOfferingReservation)i.next();
                    if (r.getCourseOffering().equals(co)) courseLimit = r.getReserved().intValue();
                }
                if (courseLimit<0) {
                    if (offering.getCourseOfferings().size()==1 && offering.getLimit()!=null) courseLimit = offering.getLimit().intValue();
                }
                for (Iterator i=offering.getInstrOfferingConfigs().iterator();i.hasNext();) {
                    InstrOfferingConfig config = (InstrOfferingConfig)i.next();
                    if (config.isUnlimitedEnrollment().booleanValue()) unlimited=true;
                }
                int enrl = ((Number)new _RootDAO().getSession().createQuery(
                        "select count(*) from StudentClassEnrollment s where s.courseOffering.uniqueId=:courseId")
                        .setLong("courseId", course.getUniqueId()).uniqueResult()).intValue();
                TreeSet<ExamAssignmentInfo> exams = getExams(course);
                String courseName = (course.isIsControl()?"":" ")+course.getCourseName();
                iCoursePrinted = false;
                if (exams.isEmpty()) {
                    println(
                            rpad(courseName,11)+" "+
                            rpad(course.getTitle()==null?"":course.getTitle(),(iDispLimits?28:48))+" "+
                            (iDispLimits?lpad(courseLimit<=0?unlimited?" inf":"":String.valueOf(courseLimit),9)+" "+lpad(enrl<=0?"":String.valueOf(enrl),9)+" ":"")+
                            " "+(hasCourseExam?"** NO EXAM**":""));
                } else for (ExamAssignmentInfo exam : exams) {
                    Vector<String> rooms = new Vector();
                    Vector<String> times = new Vector();
                    if (exam.getPeriod()==null) {
                        times.add(rpad(iHasAssignment?" Exam not assigned":" Course Exam",26));
                        rooms.add(rpad("", 23));
                        //if (exam.getMaxRooms()==0) rooms.add(" "+rpad(iNoRoom, 22));
                        for (Iterator i=new TreeSet(exam.getExam().getPreferences()).iterator();i.hasNext();) {
                            Preference pref = (Preference)i.next();
                            if (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) {
                                String pf = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?"
":"!"); if (pref instanceof ExamPeriodPref) { ExamPeriodPref xp = (ExamPeriodPref)pref; times.add(pf+rpad(formatPeriod(xp.getExamPeriod()), 25)); } else if (exam.getMaxRooms()>0) { if (pref instanceof RoomPref) { RoomPref rp = (RoomPref)pref; rooms.add(pf+formatRoom(rp.getRoom().getLabel())+" "+ lpad(""+rp.getRoom().getCapacity(),4)+" "+ lpad(""+rp.getRoom().getExamCapacity(),5)); } else if (pref instanceof BuildingPref) { BuildingPref bp = (BuildingPref)pref; rooms.add(pf+rpad(bp.getBuilding().getAbbreviation(), 22)); } else if (pref instanceof RoomFeaturePref) { RoomFeaturePref fp = (RoomFeaturePref)pref; rooms.add(pf+rpad(fp.getRoomFeature().getLabel(), 22)); } else if (pref instanceof RoomGroupPref) { RoomGroupPref gp = (RoomGroupPref)pref; rooms.add(pf+rpad(gp.getRoomGroup().getName(), 22)); } } } } for (Iterator i=exam.getExam().getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); DistributionPref pref = dObj.getDistributionPref(); if (!PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) && !PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) continue; String name = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" ":"!")+pref.getDistributionType().getAbbreviation(); if (name.toUpperCase().startsWith("!SAME ")) name = " Diff"+name.substring(5); int line = 0; for (Iterator j=new TreeSet(pref.getDistributionObjects()).iterator();j.hasNext();) { DistributionObject xObj = (DistributionObject)j.next(); if (xObj.equals(dObj)) continue; Exam x = (Exam)xObj.getPrefGroup(); for (Iterator k=new TreeSet(x.getOwners()).iterator();k.hasNext();) { ExamOwner own = (ExamOwner)k.next(); times.add(rpad(rpad(line>0?"":name,name.length())+" "+own.getLabel(),26)); line++; } } } } else { if (exam.getRooms()==null || exam.getRooms().isEmpty()) { rooms.add(" "+rpad(iNoRoom, 22)); } else for (ExamRoomInfo room : exam.getRooms()) { rooms.add(" "+formatRoom(room.getName())+" "+ lpad(""+room.getCapacity(),4)+" "+ lpad(""+room.getExamCapacity(),5)); } times.add(" "+rpad(formatPeriod(exam.getPeriod()),25)); } Vector<String> meetsWith = new Vector(); int cnt = 0; int maxCnt = Math.max(4,Math.max(rooms.size(), times.size())-1); for (ExamSectionInfo section : exam.getSections()) { if (section.getOwnerType()==ExamOwner.sOwnerTypeCourse && course.getUniqueId().equals(section.getOwnerId())) continue; if (section.getOwnerType()==ExamOwner.sOwnerTypeOffering && course.getInstructionalOffering().getUniqueId().equals(section.getOwnerId())) continue; if (cnt>=maxCnt) { meetsWith.add(" "+rpad("...",14)); break; } if (iItype) meetsWith.add(" "+rpad(section.getName(),14)); else meetsWith.add(" "+ rpad(section.getSubject(),4)+" "+ rpad(section.getCourseNbr(),5)+" "+ rpad(section.getSection(),3)); cnt++; } int nrLines = Math.max(Math.max(rooms.size(), meetsWith.size()),times.size()); for (int idx = 0; idx < nrLines; idx++) { String room = (idx<rooms.size()?rooms.elementAt(idx):rpad("",23)); String mw = (idx<meetsWith.size()?meetsWith.elementAt(idx):""); String time = (idx<times.size()?times.elementAt(idx):rpad("",26)); println(rpad(idx>0 || iCoursePrinted?"":courseName,11)+" "+ rpad(idx>0 || iCoursePrinted?"":course.getTitle()==null?"":course.getTitle(),(iDispLimits?28:48))+" "+ (iDispLimits?lpad(idx>0 || iCoursePrinted?"":courseLimit<=0?unlimited?" 
inf":"":String.valueOf(courseLimit),9)+" "+ lpad(idx>0 || iCoursePrinted || enrl<=0?"":String.valueOf(enrl),9)+" ":"")+ lpad(idx>0?"":exam.getSeatingType()==Exam.sSeatingTypeExam?"yes":"no",4)+" "+ lpad(idx>0?"":String.valueOf(exam.getLength()),3)+ time+room+mw ); } iCoursePrinted = !iNewPage; } } TreeSet<SchedulingSubpart> subparts = new TreeSet(new SchedulingSubpartComparator()); for (Iterator i=co.getInstructionalOffering().getInstrOfferingConfigs().iterator();i.hasNext();) { InstrOfferingConfig cfg = (InstrOfferingConfig)i.next(); subparts.addAll(cfg.getSchedulingSubparts()); } boolean hasSubpartExam = false; InstrOfferingConfig cfg = null; for (SchedulingSubpart subpart : subparts) { if (cfg==null) { cfg = subpart.getInstrOfferingConfig(); } else if (!cfg.equals(subpart.getInstrOfferingConfig())) { cfg = subpart.getInstrOfferingConfig(); hasSubpartExam = false; } iITypePrinted = false; TreeSet<Class_> classes = new TreeSet(new Comparator<Class_>() { public int compare(Class_ c1, Class_ c2) { if (iUseClassSuffix) { String sx1 = c1.getClassSuffix(); String sx2 = c2.getClassSuffix(); if (sx1!=null && sx2!=null) return sx1.compareTo(sx2); } return c1.getSectionNumber().compareTo(c2.getSectionNumber()); } }); classes.addAll(subpart.getClasses()); String mw = null; String message = null; TreeSet<ExamAssignmentInfo> exams = null; int minEnrl = 0, maxEnrl = 0, minLimit = 0, maxLimit = 0; Vector<Class_> same = new Vector(); boolean hasSectionExam = false, allSectionsHaveExam = true; for (Class_ clazz : classes) { if (!getExams(clazz).isEmpty()) { hasSectionExam = true; } else { allSectionsHaveExam = false; } } if (allSectionsHaveExam && classes.size()>1) hasSubpartExam = true; if (allSectionsHaveExam) hasCourseExam = true; for (Class_ clazz : classes) { int enrl = ((Number)new _RootDAO().getSession().createQuery( "select count(*) from StudentClassEnrollment s where s.clazz.uniqueId=:classId") .setLong("classId", clazz.getUniqueId()).uniqueResult()).intValue(); if (!same.isEmpty() && (iSkipHoles || same.lastElement().getSectionNumber()+1==clazz.getSectionNumber()) && ToolBox.equals(clazz.getSchedulePrintNote(), same.lastElement().getSchedulePrintNote()) && exams.equals(getExams(clazz)) && mw.equals(getMeetWith(clazz, null)) && message.equals(getMessage(clazz, hasCourseExam, hasSectionExam))) { minEnrl = Math.min(minEnrl, enrl); maxEnrl = Math.max(maxEnrl, enrl); minLimit = Math.min(minLimit, clazz.getClassLimit()); maxLimit = Math.max(maxLimit, clazz.getClassLimit()); message = getMessage(clazz, hasCourseExam, hasSectionExam); same.add(clazz); continue; } if (!same.isEmpty()) { print(same, hasCourseExam, hasSectionExam, minLimit, maxLimit, minEnrl, maxEnrl); same.clear(); } exams = getExams(clazz); mw = getMeetWith(clazz, null); minEnrl = maxEnrl = enrl; minLimit = maxLimit = clazz.getClassLimit(); message = getMessage(clazz, hasCourseExam, hasSectionExam); same.add(clazz); } if (!same.isEmpty()) print(same, hasCourseExam || hasSubpartExam, hasSectionExam, minLimit, maxLimit, minEnrl, maxEnrl); } if (!iNewPage) println(""); } } }
JavaSource/org/unitime/timetable/reports/exam/ExamVerificationReport.java
package org.unitime.timetable.reports.exam; import java.io.File; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.Locale; import java.util.TreeSet; import java.util.Vector; import net.sf.cpsolver.ifs.util.ToolBox; import org.unitime.timetable.ApplicationProperties; import org.unitime.timetable.model.BuildingPref; import org.unitime.timetable.model.Class_; import org.unitime.timetable.model.CourseOffering; import org.unitime.timetable.model.CourseOfferingReservation; import org.unitime.timetable.model.Department; import org.unitime.timetable.model.DistributionObject; import org.unitime.timetable.model.DistributionPref; import org.unitime.timetable.model.Event; import org.unitime.timetable.model.Exam; import org.unitime.timetable.model.ExamOwner; import org.unitime.timetable.model.ExamPeriodPref; import org.unitime.timetable.model.InstrOfferingConfig; import org.unitime.timetable.model.InstructionalOffering; import org.unitime.timetable.model.ItypeDesc; import org.unitime.timetable.model.Meeting; import org.unitime.timetable.model.Preference; import org.unitime.timetable.model.PreferenceLevel; import org.unitime.timetable.model.RoomFeaturePref; import org.unitime.timetable.model.RoomGroupPref; import org.unitime.timetable.model.RoomPref; import org.unitime.timetable.model.SchedulingSubpart; import org.unitime.timetable.model.Session; import org.unitime.timetable.model.SubjectArea; import org.unitime.timetable.model.comparators.CourseOfferingComparator; import org.unitime.timetable.model.comparators.SchedulingSubpartComparator; import org.unitime.timetable.model.dao.SessionDAO; import org.unitime.timetable.model.dao._RootDAO; import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo; import org.unitime.timetable.solver.exam.ui.ExamRoomInfo; import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamSectionInfo; import com.lowagie.text.DocumentException; public class ExamVerificationReport extends PdfLegacyExamReport { private CourseOffering iCourseOffering = null; private boolean iSkipHoles = true; private boolean iHasAssignment = false; public ExamVerificationReport(int mode, File file, Session session, int examType, SubjectArea subjectArea, Collection<ExamAssignmentInfo> exams) throws IOException, DocumentException { super(mode, file, "EXAMINATION VERIFICATION REPORT", session, examType, subjectArea, exams); for (ExamAssignmentInfo exam : exams) { if (exam.getPeriod()!=null) { iHasAssignment = true; break; } } } public TreeSet<ExamAssignmentInfo> getExams(CourseOffering course) { TreeSet<ExamAssignmentInfo> exams = new TreeSet(); for (ExamAssignmentInfo exam : getExams()) { for (ExamSectionInfo section : exam.getSections()) { if (section.getOwnerType()==ExamOwner.sOwnerTypeCourse && section.getOwnerId().equals(course.getUniqueId())) exams.add(exam); if (section.getOwnerType()==ExamOwner.sOwnerTypeOffering && section.getOwnerId().equals(course.getInstructionalOffering().getUniqueId())) exams.add(exam); } } return exams; } public TreeSet<ExamAssignmentInfo> getExams(Class_ clazz) { TreeSet<ExamAssignmentInfo> exams = new TreeSet(); for (ExamAssignmentInfo exam : getExams()) { for (ExamSectionInfo section : exam.getSections()) { if (section.getOwnerType()==ExamOwner.sOwnerTypeClass && section.getOwnerId().equals(clazz.getUniqueId())) exams.add(exam); if (section.getOwnerType()==ExamOwner.sOwnerTypeConfig && 
section.getOwnerId().equals(clazz.getSchedulingSubpart().getInstrOfferingConfig().getUniqueId())) exams.add(exam); } } return exams; } public String genName(String pattern, Class_ clazz) { String name = pattern; int idx = -1; while (name.indexOf('%',idx+1)>=0) { idx = name.indexOf('%',idx); char code = name.charAt(idx+1); String name4code = genName(code, clazz); name = name.substring(0,idx)+(name4code==null?"":name4code)+name.substring(idx+2); } return name; } protected String genName(char code, Class_ clazz) { switch (code) { case '_' : return " "; case 's' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getSubjectArea().getSubjectAreaAbbreviation(); case 'c' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getCourseNbr(); case 'i' : return clazz.getSchedulingSubpart().getItypeDesc().trim(); case 'n' : return clazz.getSectionNumberString(); case 'x' : return clazz.getSchedulingSubpart().getInstrOfferingConfig().getName(); case 'D' : return clazz.getControllingDept().getDeptCode(); case 'd' : Department d = clazz.getControllingDept(); return (d.getAbbreviation()==null || d.getAbbreviation().length()==0?d.getDeptCode():d.getAbbreviation()); case 'a' : return clazz.getClassSuffix(); case 'y' : return clazz.getSchedulingSubpart().getSchedulingSubpartSuffix(); case 'e' : return clazz.getExternalUniqueId(); case 'f' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getExternalUniqueId(); case 'o' : return clazz.getSchedulingSubpart().getControllingCourseOffering().getInstructionalOffering().getExternalUniqueId(); case 't' : return ""; case 'I' : return clazz.getSchedulingSubpart().getItype().getItype().toString(); case 'p' : ItypeDesc itype = clazz.getSchedulingSubpart().getItype(); while (itype.getParent()!=null) itype = itype.getParent(); return itype.getAbbv(); case 'P' : itype = clazz.getSchedulingSubpart().getItype(); while (itype.getParent()!=null) itype = itype.getParent(); return itype.getItype().toString(); } return ""; } public boolean isFullTerm(Event classEvent) { if (classEvent!=null && !classEvent.getMeetings().isEmpty()) { TreeSet meetings = new TreeSet(classEvent.getMeetings()); Meeting first = (Meeting)meetings.first(); Meeting last = (Meeting)meetings.last(); Calendar c = Calendar.getInstance(Locale.US); c.setTime(getSession().getSessionBeginDateTime()); c.add(Calendar.WEEK_OF_YEAR, 2); if (first.getMeetingDate().compareTo(c.getTime())>=0) return false; c.setTime(getSession().getClassesEndDateTime()); c.add(Calendar.WEEK_OF_YEAR, -2); if (last.getMeetingDate().compareTo(c.getTime())<=0) return false; return true; } return false; } public String getMeetWith(Class_ clazz, Vector<Class_> exclude) { TreeSet<Class_> classes = new TreeSet(new Comparator<Class_>() { public int compare(Class_ c1, Class_ c2) { if (c1.getSchedulingSubpart().equals(c2.getSchedulingSubpart())) { String sx1 = (iUseClassSuffix?c1.getClassSuffix():c1.getSectionNumberString()); String sx2 = (iUseClassSuffix?c2.getClassSuffix():c2.getSectionNumberString()); if (sx1!=null && sx2!=null) return sx1.compareTo(sx2); return c1.getSectionNumber().compareTo(c2.getSectionNumber()); } return new SchedulingSubpartComparator().compare(c1.getSchedulingSubpart(), c2.getSchedulingSubpart()); } }); for (Iterator i=clazz.getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); if (!"MEET_WITH".equals(dObj.getDistributionPref().getDistributionType().getReference())) continue; for (Iterator 
j=dObj.getDistributionPref().getDistributionObjects().iterator();j.hasNext();) { DistributionObject xObj = (DistributionObject)j.next(); if (exclude!=null && exclude.contains(xObj.getPrefGroup())) continue; if (xObj.getPrefGroup() instanceof Class_) { classes.add((Class_)xObj.getPrefGroup()); } else { classes.addAll(((SchedulingSubpart)xObj.getPrefGroup()).getClasses()); } } } if (classes.isEmpty()) return ""; Class_ prev = clazz; String ret = ""; for (Class_ c : classes) { if (ret.length()==0) ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.Class"),c); else if (prev.getSchedulingSubpart().getControllingCourseOffering().getSubjectArea().equals(c.getSchedulingSubpart().getControllingCourseOffering().getSubjectArea())) { //same subject area if (prev.getSchedulingSubpart().getControllingCourseOffering().equals(c.getSchedulingSubpart().getControllingCourseOffering())) { //same course number if (prev.getSchedulingSubpart().equals(c.getSchedulingSubpart())) ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.sameSubpart.Class"),c); else ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.sameCourse.Class"),c); } else { //different course number ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.sameSubject.Class"),c); } } else { ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.diffSubject.separator"),prev); ret+=genName(ApplicationProperties.getProperty("tmtbl.exam.name.Class"),c); } prev = c; } return ret; } private String formatSection(Class_ clazz) { return (!iUseClassSuffix || clazz.getClassSuffix()==null || clazz.getClassSuffix().length()==0?clazz.getSectionNumberString():clazz.getClassSuffix()); } private String formatSection(Vector<Class_> classes) { if (classes.isEmpty()) return ""; if (classes.size()==1) return formatSection(classes.firstElement()); return formatSection(classes.firstElement())+" - "+formatSection(classes.lastElement()); } private boolean sameExams(TreeSet<ExamAssignmentInfo> x1, TreeSet<ExamAssignmentInfo> x2) { if (x1.equals(x2)) return true; if (x1.size()!=x2.size()) return false; return false; } public String getMessage(Class_ clazz, boolean hasCourseExam, boolean hasSectionExam) { TreeSet<ExamAssignmentInfo> exams = getExams(clazz); if (!exams.isEmpty()) return ""; String message = "** NO EXAM **"; if (hasCourseExam && !hasSectionExam) message = ""; // Has other exam else if (!hasSectionExam && !clazz.getSchedulingSubpart().getItype().isOrganized()) message = "Not organized instructional type"; else { if (clazz.getEvent()==null || clazz.getEvent().getMeetings().isEmpty()) { message = "Class not organized"; } else if (!isFullTerm(clazz.getEvent())) { TreeSet meetings = new TreeSet(clazz.getEvent().getMeetings()); Meeting first = (Meeting)meetings.first(); Meeting last = (Meeting)meetings.last(); SimpleDateFormat df = new SimpleDateFormat("MM/dd"); message = "Class not full-term ("+df.format(first.getMeetingDate())+(first.getMeetingDate().equals(last.getMeetingDate())?"":" - "+df.format(last.getMeetingDate()))+")"; } } return message; } private void print(Vector<Class_> same, boolean hasCourseExam, boolean hasSectionExam, int minLimit, int maxLimit, int minEnrl, int maxEnrl) throws DocumentException { String cmw = getMeetWith(same.firstElement(),same); TreeSet<ExamAssignmentInfo> exams = getExams(same.firstElement()); iPeriodPrinted = false; if (exams.isEmpty()) { String message = "** NO EXAM **"; if (hasCourseExam && !hasSectionExam) message = ""; // Has other exam else if (!hasSectionExam && 
!same.firstElement().getSchedulingSubpart().getItype().isOrganized()) message = "Not organized instructional type"; else { Event classEvent = same.firstElement().getEvent(); if (classEvent==null || classEvent.getMeetings().isEmpty()) { message = "Class not organized"; } else if (!isFullTerm(classEvent)) { TreeSet meetings = new TreeSet(classEvent.getMeetings()); Meeting first = (Meeting)meetings.first(); Meeting last = (Meeting)meetings.last(); SimpleDateFormat df = new SimpleDateFormat("MM/dd"); message = "Class not full-term ("+df.format(first.getMeetingDate())+(first.getMeetingDate().equals(last.getMeetingDate())?"":" - "+df.format(last.getMeetingDate()))+")"; } } String title = same.firstElement().getSchedulePrintNote(); /* if (title!=null && title.equals(same.firstElement().getSchedulingSubpart().getControllingCourseOffering().getTitle())) title = null; */ boolean hasTitle = (title!=null && title.trim().length()>0); boolean titleSameLine = hasTitle && (" "+title).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length()); boolean titleSeparateLine = hasTitle && !titleSameLine; boolean hasMw = cmw.length()>0; boolean mwSameLine = hasMw && !titleSameLine && (" m/w "+cmw).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length()); boolean mwSeparateLine = hasMw && !mwSameLine; if ((titleSeparateLine || mwSeparateLine) && getLineNumber()+1+(titleSeparateLine?0:1)+(mwSeparateLine?1:0)>iNrLines) newPage(); println( lpad(iITypePrinted?"":same.firstElement().getSchedulingSubpart().getItypeDesc(),11)+" "+ rpad(formatSection(same)+(same.size()>1?" ("+same.size()+" classes)":"")+ (titleSameLine?" "+title:"")+(mwSameLine?" m/w "+cmw:""),(iDispLimits?28:48))+" "+ (iDispLimits?lpad(maxLimit<=0?"":minLimit!=maxLimit?minLimit+"-"+maxLimit:""+minLimit,9)+" "+ lpad(maxEnrl<=0?"":minEnrl!=maxEnrl?minEnrl+"-"+maxEnrl:""+minEnrl,9)+" ":"")+ " "+message); if (titleSeparateLine) println(lpad("",11)+" "+title); if (mwSeparateLine) println(lpad("",11)+" Meets with "+cmw); iITypePrinted = !iNewPage; } else for (ExamAssignmentInfo exam : exams) { Vector<String> rooms = new Vector(); Vector<String> times = new Vector(); if (exam.getPeriod()==null) { times.add(rpad(iHasAssignment?" Exam not assigned":" Section exam",26)); rooms.add(rpad("", 23)); //if (exam.getMaxRooms()==0) rooms.add(" "+rpad(iNoRoom, 22)); for (Iterator i=new TreeSet(exam.getExam().getPreferences()).iterator();i.hasNext();) { Preference pref = (Preference)i.next(); if (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) { String pf = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" 
":"!"); if (pref instanceof ExamPeriodPref) { ExamPeriodPref xp = (ExamPeriodPref)pref; times.add(pf+rpad(formatPeriod(xp.getExamPeriod()), 25)); } else if (exam.getMaxRooms()>0) { if (pref instanceof RoomPref) { RoomPref rp = (RoomPref)pref; rooms.add(pf+formatRoom(rp.getRoom().getLabel())+" "+ lpad(""+rp.getRoom().getCapacity(),4)+" "+ lpad(""+rp.getRoom().getExamCapacity(),5)); } else if (pref instanceof BuildingPref) { BuildingPref bp = (BuildingPref)pref; rooms.add(pf+rpad(bp.getBuilding().getAbbreviation(), 22)); } else if (pref instanceof RoomFeaturePref) { RoomFeaturePref fp = (RoomFeaturePref)pref; rooms.add(pf+rpad(fp.getRoomFeature().getLabel(), 22)); } else if (pref instanceof RoomGroupPref) { RoomGroupPref gp = (RoomGroupPref)pref; rooms.add(pf+rpad(gp.getRoomGroup().getName(), 22)); } } } } for (Iterator i=exam.getExam().getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); DistributionPref pref = dObj.getDistributionPref(); if (!PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) && !PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) continue; int line = 0; String name = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" ":"!")+pref.getDistributionType().getAbbreviation(); if (name.toUpperCase().startsWith("!SAME ")) name = " Diff"+name.substring(5); for (Iterator j=new TreeSet(pref.getDistributionObjects()).iterator();j.hasNext();) { DistributionObject xObj = (DistributionObject)j.next(); if (xObj.equals(dObj)) continue; Exam x = (Exam)xObj.getPrefGroup(); for (Iterator k=new TreeSet(x.getOwners()).iterator();k.hasNext();) { ExamOwner own = (ExamOwner)k.next(); times.add(rpad(rpad(line>0?"":name,name.length())+" "+own.getLabel(),26)); line++; } } } } else { if (exam.getRooms()==null || exam.getRooms().isEmpty()) { rooms.add(" "+rpad(iNoRoom, 22)); } else for (ExamRoomInfo room : exam.getRooms()) { rooms.add(" "+formatRoom(room.getName())+" "+ lpad(""+room.getCapacity(),4)+" "+ lpad(""+room.getExamCapacity(),5)); } times.add(" "+rpad(formatPeriod(exam.getPeriod()),25)); } Vector<String> meetsWith = new Vector(); int cnt = 0; int maxCnt = Math.max(4,Math.max(rooms.size(), times.size())-1); for (ExamSectionInfo section : exam.getSections()) { if (section.getOwnerType()==ExamOwner.sOwnerTypeClass && same.contains(section.getOwner().getOwnerObject())) continue; if (section.getOwnerType()==ExamOwner.sOwnerTypeConfig && section.getOwnerId().equals(same.firstElement().getSchedulingSubpart().getInstrOfferingConfig().getUniqueId())) continue; if (cnt>=maxCnt) { meetsWith.add(" "+rpad("...",14)); break; } if (iItype) meetsWith.add(" "+rpad(section.getName(),14)); else meetsWith.add(" "+ rpad(section.getSubject(),4)+" "+ rpad(section.getCourseNbr(),5)+" "+ rpad(section.getSection(),3)); cnt++; } int nrLines = Math.max(Math.max(rooms.size(), meetsWith.size()),times.size()); String title = same.firstElement().getSchedulePrintNote(); /* if (title!=null && title.equals(same.firstElement().getSchedulingSubpart().getControllingCourseOffering().getTitle())) title = null; */ boolean hasTitle = !iPeriodPrinted && (title!=null && title.trim().length()>0); boolean titleSameLine = hasTitle && (" "+title).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" 
("+same.size()+" classes)":"").length()); boolean titleSecondLine = hasTitle && !titleSameLine && nrLines>1 && (" "+title).length()<=(iDispLimits?28:48); boolean titleSeparateLine = hasTitle && !titleSameLine && !titleSecondLine; boolean hasMw = !iPeriodPrinted && cmw.length()>0; boolean mwSameLine = hasMw && !titleSameLine && (" m/w "+cmw).length()<=((iDispLimits?28:48)-formatSection(same).length()-(same.size()>1?" ("+same.size()+" classes)":"").length()); boolean mwSecondLine = hasMw && !mwSameLine && !titleSecondLine && nrLines>1 && (" Meets with "+cmw).length()<=(iDispLimits?28:48); boolean mwThirdLine = hasMw && !mwSameLine && titleSecondLine && nrLines>2 && (" Meets with "+cmw).length()<=(iDispLimits?28:48); boolean mwSeparateLine = hasMw && !mwSameLine && !mwSecondLine && !mwThirdLine; if (getLineNumber()+nrLines+(mwSeparateLine?1:0)+(titleSeparateLine?1:0)>iNrLines) newPage(); for (int idx = 0; idx < nrLines; idx++) { String room = (idx<rooms.size()?rooms.elementAt(idx):rpad("",23)); String mw = (idx<meetsWith.size()?meetsWith.elementAt(idx):""); String time = (idx<times.size()?times.elementAt(idx):rpad("",26)); println(lpad(idx>0 || iITypePrinted?"":same.firstElement().getSchedulingSubpart().getItypeDesc(),11)+" "+ rpad(iPeriodPrinted?"":idx>0 ? (idx==1 && mwSecondLine?" Meets with "+cmw:"")+ (idx==1 && titleSecondLine?" "+title:"")+ (idx==2 && mwThirdLine?" Meets with "+cmw:"") : formatSection(same)+(same.size()>1?" ("+same.size()+" classes)":"")+ (titleSameLine?" "+title:"")+ (mwSameLine?" m/w "+cmw:"") ,(iDispLimits?28:48))+" "+ (iDispLimits?lpad(iPeriodPrinted || idx>0 || maxLimit<=0?"":minLimit!=maxLimit?minLimit+"-"+maxLimit:""+minLimit,9)+" "+ lpad(iPeriodPrinted || idx>0 || maxEnrl<=0?"":minEnrl!=maxEnrl?minEnrl+"-"+maxEnrl:""+minEnrl,9)+" ":"")+ lpad(idx>0?"":exam.getSeatingType()==Exam.sSeatingTypeExam?"yes":"no",4)+" "+ lpad(idx>0?"":String.valueOf(exam.getLength()),3)+time+room+mw ); if (idx==0 && titleSeparateLine) println(lpad("",11)+" "+title); if (idx==0 && mwSeparateLine) println(lpad("",11)+" Meets with "+cmw); } iITypePrinted = iPeriodPrinted = !iNewPage; } } public void printReport() throws DocumentException { System.out.println("Loading courses ..."); TreeSet<CourseOffering> allCourses = new TreeSet(new Comparator<CourseOffering>() { public int compare(CourseOffering co1, CourseOffering co2) { int cmp = co1.getSubjectAreaAbbv().compareTo(co2.getSubjectAreaAbbv()); if (cmp!=0) return cmp; cmp = co1.getCourseNbr().compareTo(co2.getCourseNbr()); if (cmp!=0) return cmp; return co1.getUniqueId().compareTo(co2.getUniqueId()); } }); if (getSubjectArea()!=null) allCourses.addAll(new SessionDAO().getSession(). createQuery("select co from CourseOffering co where co.subjectArea.uniqueId=:subjectAreaId"). setLong("subjectAreaId", getSubjectArea().getUniqueId()).list()); else allCourses.addAll(new SessionDAO().getSession(). createQuery("select co from CourseOffering co where co.subjectArea.session.uniqueId=:sessionId"). setLong("sessionId", getSession().getUniqueId()).list()); if (allCourses.isEmpty()) return; System.out.println("Printing report ..."); SubjectArea subject = null; setHeader(new String[] { "Course Title "+(iDispLimits?" ":" ")+" Alt Len ", " InsType Sections "+(iDispLimits?" Limit Enrollmnt":" ")+" Seat ght Date & Time Room Cap ExCap Exam with", "----------- ----------------------------"+(iDispLimits?" 
--------- ---------":"--------------------")+" ---- --- ------------------------- ----------- ---- ----- --------------"}); printHeader(); for (CourseOffering co : allCourses) { InstructionalOffering io = co.getInstructionalOffering(); if (!co.isIsControl() && co.getInstructionalOffering().getControllingCourseOffering().getSubjectArea().equals(co.getSubjectArea())) continue; if (subject==null) { subject = co.getSubjectArea(); setFooter(subject.getSubjectAreaAbbreviation()); } else if (!subject.equals(co.getSubjectArea())) { subject = co.getSubjectArea(); newPage(); setFooter(subject.getSubjectAreaAbbreviation()); } setPageName(co.getCourseName()); setCont(co.getCourseName()); TreeSet<CourseOffering> courses = new TreeSet(new CourseOfferingComparator(CourseOfferingComparator.COMPARE_BY_CTRL_CRS)); courses.addAll(co.getInstructionalOffering().getCourseOfferings()); boolean hasCourseExam = false; for (CourseOffering course : courses) { if (!getExams(course).isEmpty()) { hasCourseExam = true; break; } } for (CourseOffering course : courses) { int courseLimit = -1; InstructionalOffering offering = course.getInstructionalOffering(); boolean unlimited = false; for (Iterator i=offering.getCourseReservations().iterator();i.hasNext();) { CourseOfferingReservation r = (CourseOfferingReservation)i.next(); if (r.getCourseOffering().equals(co)) courseLimit = r.getReserved().intValue(); } if (courseLimit<0) { if (offering.getCourseOfferings().size()==1 && offering.getLimit()!=null) courseLimit = offering.getLimit().intValue(); } for (Iterator i=offering.getInstrOfferingConfigs().iterator();i.hasNext();) { InstrOfferingConfig config = (InstrOfferingConfig)i.next(); if (config.isUnlimitedEnrollment().booleanValue()) unlimited=true; } int enrl = ((Number)new _RootDAO().getSession().createQuery( "select count(*) from StudentClassEnrollment s where s.courseOffering.uniqueId=:courseId") .setLong("courseId", course.getUniqueId()).uniqueResult()).intValue(); TreeSet<ExamAssignmentInfo> exams = getExams(course); String courseName = (course.isIsControl()?"":" ")+course.getCourseName(); iCoursePrinted = false; if (exams.isEmpty()) { println( rpad(courseName,11)+" "+ rpad(course.getTitle()==null?"":course.getTitle(),(iDispLimits?28:48))+" "+ (iDispLimits?lpad(courseLimit<=0?unlimited?" inf":"":String.valueOf(courseLimit),9)+" "+lpad(enrl<=0?"":String.valueOf(enrl),9)+" ":"")+ " "+(hasCourseExam?"** NO EXAM**":"")); } else for (ExamAssignmentInfo exam : exams) { Vector<String> rooms = new Vector(); Vector<String> times = new Vector(); if (exam.getPeriod()==null) { times.add(rpad(iHasAssignment?" Exam not assigned":" Course Exam",26)); rooms.add(rpad("", 23)); //if (exam.getMaxRooms()==0) rooms.add(" "+rpad(iNoRoom, 22)); for (Iterator i=new TreeSet(exam.getExam().getPreferences()).iterator();i.hasNext();) { Preference pref = (Preference)i.next(); if (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) { String pf = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" 
":"!"); if (pref instanceof ExamPeriodPref) { ExamPeriodPref xp = (ExamPeriodPref)pref; times.add(pf+rpad(formatPeriod(xp.getExamPeriod()), 25)); } else if (exam.getMaxRooms()>0) { if (pref instanceof RoomPref) { RoomPref rp = (RoomPref)pref; rooms.add(pf+formatRoom(rp.getRoom().getLabel())+" "+ lpad(""+rp.getRoom().getCapacity(),4)+" "+ lpad(""+rp.getRoom().getExamCapacity(),5)); } else if (pref instanceof BuildingPref) { BuildingPref bp = (BuildingPref)pref; rooms.add(pf+rpad(bp.getBuilding().getAbbreviation(), 22)); } else if (pref instanceof RoomFeaturePref) { RoomFeaturePref fp = (RoomFeaturePref)pref; rooms.add(pf+rpad(fp.getRoomFeature().getLabel(), 22)); } else if (pref instanceof RoomGroupPref) { RoomGroupPref gp = (RoomGroupPref)pref; rooms.add(pf+rpad(gp.getRoomGroup().getName(), 22)); } } } } for (Iterator i=exam.getExam().getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); DistributionPref pref = dObj.getDistributionPref(); if (!PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) && !PreferenceLevel.sProhibited.equals(pref.getPrefLevel().getPrefProlog())) continue; String name = (PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog())?" ":"!")+pref.getDistributionType().getAbbreviation(); if (name.toUpperCase().startsWith("!SAME ")) name = " Diff"+name.substring(5); int line = 0; for (Iterator j=new TreeSet(pref.getDistributionObjects()).iterator();j.hasNext();) { DistributionObject xObj = (DistributionObject)j.next(); if (xObj.equals(dObj)) continue; Exam x = (Exam)xObj.getPrefGroup(); for (Iterator k=new TreeSet(x.getOwners()).iterator();k.hasNext();) { ExamOwner own = (ExamOwner)k.next(); times.add(rpad(rpad(line>0?"":name,name.length())+" "+own.getLabel(),26)); line++; } } } } else { if (exam.getRooms()==null || exam.getRooms().isEmpty()) { rooms.add(" "+rpad(iNoRoom, 22)); } else for (ExamRoomInfo room : exam.getRooms()) { rooms.add(" "+formatRoom(room.getName())+" "+ lpad(""+room.getCapacity(),4)+" "+ lpad(""+room.getExamCapacity(),5)); } times.add(" "+rpad(formatPeriod(exam.getPeriod()),25)); } Vector<String> meetsWith = new Vector(); int cnt = 0; int maxCnt = Math.max(4,Math.max(rooms.size(), times.size())-1); for (ExamSectionInfo section : exam.getSections()) { if (section.getOwnerType()==ExamOwner.sOwnerTypeCourse && course.getUniqueId().equals(section.getOwnerId())) continue; if (section.getOwnerType()==ExamOwner.sOwnerTypeOffering && course.getInstructionalOffering().getUniqueId().equals(section.getOwnerId())) continue; if (cnt>=maxCnt) { meetsWith.add(" "+rpad("...",14)); break; } if (iItype) meetsWith.add(" "+rpad(section.getName(),14)); else meetsWith.add(" "+ rpad(section.getSubject(),4)+" "+ rpad(section.getCourseNbr(),5)+" "+ rpad(section.getSection(),3)); cnt++; } int nrLines = Math.max(Math.max(rooms.size(), meetsWith.size()),times.size()); for (int idx = 0; idx < nrLines; idx++) { String room = (idx<rooms.size()?rooms.elementAt(idx):rpad("",23)); String mw = (idx<meetsWith.size()?meetsWith.elementAt(idx):""); String time = (idx<times.size()?times.elementAt(idx):rpad("",26)); println(rpad(idx>0 || iCoursePrinted?"":courseName,11)+" "+ rpad(idx>0 || iCoursePrinted?"":course.getTitle()==null?"":course.getTitle(),(iDispLimits?28:48))+" "+ (iDispLimits?lpad(idx>0 || iCoursePrinted?"":courseLimit<=0?unlimited?" 
inf":"":String.valueOf(courseLimit),9)+" "+ lpad(idx>0 || iCoursePrinted || enrl<=0?"":String.valueOf(enrl),9)+" ":"")+ lpad(idx>0?"":exam.getSeatingType()==Exam.sSeatingTypeExam?"yes":"no",4)+" "+ lpad(idx>0?"":String.valueOf(exam.getLength()),3)+ time+room+mw ); } iCoursePrinted = !iNewPage; } } TreeSet<SchedulingSubpart> subparts = new TreeSet(new SchedulingSubpartComparator()); for (Iterator i=co.getInstructionalOffering().getInstrOfferingConfigs().iterator();i.hasNext();) { InstrOfferingConfig cfg = (InstrOfferingConfig)i.next(); subparts.addAll(cfg.getSchedulingSubparts()); } boolean hasSubpartExam = false; InstrOfferingConfig cfg = null; for (SchedulingSubpart subpart : subparts) { if (cfg==null) { cfg = subpart.getInstrOfferingConfig(); } else if (!cfg.equals(subpart.getInstrOfferingConfig())) { cfg = subpart.getInstrOfferingConfig(); hasSubpartExam = false; } iITypePrinted = false; TreeSet<Class_> classes = new TreeSet(new Comparator<Class_>() { public int compare(Class_ c1, Class_ c2) { if (iUseClassSuffix) { String sx1 = c1.getClassSuffix(); String sx2 = c2.getClassSuffix(); if (sx1!=null && sx2!=null) return sx1.compareTo(sx2); } return c1.getSectionNumber().compareTo(c2.getSectionNumber()); } }); classes.addAll(subpart.getClasses()); String mw = null; String message = null; TreeSet<ExamAssignmentInfo> exams = null; int minEnrl = 0, maxEnrl = 0, minLimit = 0, maxLimit = 0; Vector<Class_> same = new Vector(); boolean hasSectionExam = false, allSectionsHaveExam = true; for (Class_ clazz : classes) { if (!getExams(clazz).isEmpty()) { hasSectionExam = true; } else { allSectionsHaveExam = false; } } if (allSectionsHaveExam && classes.size()>1) hasSubpartExam = true; if (allSectionsHaveExam) hasCourseExam = true; for (Class_ clazz : classes) { int enrl = ((Number)new _RootDAO().getSession().createQuery( "select count(*) from StudentClassEnrollment s where s.clazz.uniqueId=:classId") .setLong("classId", clazz.getUniqueId()).uniqueResult()).intValue(); if (!same.isEmpty() && (iSkipHoles || same.lastElement().getSectionNumber()+1==clazz.getSectionNumber()) && ToolBox.equals(clazz.getSchedulePrintNote(), same.lastElement().getSchedulePrintNote()) && exams.equals(getExams(clazz)) && mw.equals(getMeetWith(clazz, null)) && message.equals(getMessage(clazz, hasCourseExam, hasSectionExam))) { minEnrl = Math.min(minEnrl, enrl); maxEnrl = Math.max(maxEnrl, enrl); minLimit = Math.min(minLimit, clazz.getClassLimit()); maxLimit = Math.max(maxLimit, clazz.getClassLimit()); message = getMessage(clazz, hasCourseExam, hasSectionExam); same.add(clazz); continue; } if (!same.isEmpty()) { print(same, hasCourseExam, hasSectionExam, minLimit, maxLimit, minEnrl, maxEnrl); same.clear(); } exams = getExams(clazz); mw = getMeetWith(clazz, null); minEnrl = maxEnrl = enrl; minLimit = maxLimit = clazz.getClassLimit(); message = getMessage(clazz, hasCourseExam, hasSectionExam); same.add(clazz); } if (!same.isEmpty()) print(same, hasCourseExam || hasSubpartExam, hasSectionExam, minLimit, maxLimit, minEnrl, maxEnrl); } if (!iNewPage) println(""); } } }
isFullTerm(ClassEvent) moved to parent class
JavaSource/org/unitime/timetable/reports/exam/ExamVerificationReport.java
isFullTerm(ClassEvent) moved to parent class
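The commit above is a pure refactor: the isFullTerm(Event) helper, whose body is still visible in old_contents, is hoisted out of ExamVerificationReport into the shared report superclass so that the verification report inherits it rather than defining it. A minimal sketch of the resulting shape, assuming the destination is PdfLegacyExamReport (the superclass named in the class declaration) and that the body is carried over unchanged; the ClassEvent parameter type in the subject suggests the signature may also have been narrowed from Event:

import java.util.Calendar;
import java.util.Locale;
import java.util.TreeSet;
import org.unitime.timetable.model.Event;
import org.unitime.timetable.model.Meeting;
import org.unitime.timetable.model.Session;

// Sketch only: the helper as it would live in the parent report class.
// The body is copied from old_contents above; getSession() is assumed to be
// provided by the real report base class and is declared abstract here.
public abstract class PdfLegacyExamReport {

    protected abstract Session getSession();

    public boolean isFullTerm(Event classEvent) {
        if (classEvent != null && !classEvent.getMeetings().isEmpty()) {
            TreeSet meetings = new TreeSet(classEvent.getMeetings());
            Meeting first = (Meeting) meetings.first();
            Meeting last = (Meeting) meetings.last();
            Calendar c = Calendar.getInstance(Locale.US);
            // Full term = the class starts within two weeks of the session begin date...
            c.setTime(getSession().getSessionBeginDateTime());
            c.add(Calendar.WEEK_OF_YEAR, 2);
            if (first.getMeetingDate().compareTo(c.getTime()) >= 0) return false;
            // ...and ends within two weeks of the classes end date.
            c.setTime(getSession().getClassesEndDateTime());
            c.add(Calendar.WEEK_OF_YEAR, -2);
            if (last.getMeetingDate().compareTo(c.getTime()) <= 0) return false;
            return true;
        }
        return false;
    }
}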
Java
apache-2.0
601a2400c906178ddba0da00f593e86a800d6a75
0
serge-rider/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,Sargul/dbeaver
/*
 * Copyright (C) 2010-2012 Serge Rieder
 * [email protected]
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package org.jkiss.dbeaver.model.exec.jdbc;

import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.model.exec.DBCStatementType;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.concurrent.Executor;

/**
 * JDBC connection
 */
public interface JDBCExecutionContext extends DBCExecutionContext, Connection {

    Connection getOriginal();

    @Override
    JDBCStatement prepareStatement(
        DBCStatementType type,
        String query,
        boolean scrollable,
        boolean updatable,
        boolean returnGeneratedKeys) throws DBCException;

    @Override
    JDBCDatabaseMetaData getMetaData() throws SQLException;

    @Override
    JDBCStatement createStatement() throws SQLException;

    @Override
    JDBCStatement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql) throws SQLException;

    @Override
    JDBCCallableStatement prepareCall(String sql) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException;

    @Override
    JDBCCallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException;

    @Override
    JDBCCallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException;

    @Override
    void close();

    @Override
    String getSchema() throws SQLException;

    @Override
    void setSchema(String schema) throws SQLException;

    @Override
    void abort(Executor executor) throws SQLException;

    @Override
    void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException;

    @Override
    int getNetworkTimeout() throws SQLException;
}
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/model/exec/jdbc/JDBCExecutionContext.java
/*
 * Copyright (C) 2010-2012 Serge Rieder
 * [email protected]
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package org.jkiss.dbeaver.model.exec.jdbc;

import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCExecutionContext;
import org.jkiss.dbeaver.model.exec.DBCStatementType;

import java.sql.Connection;
import java.sql.SQLException;

/**
 * JDBC connection
 */
public interface JDBCExecutionContext extends DBCExecutionContext, Connection {

    Connection getOriginal();

    @Override
    JDBCStatement prepareStatement(
        DBCStatementType type,
        String query,
        boolean scrollable,
        boolean updatable,
        boolean returnGeneratedKeys) throws DBCException;

    @Override
    JDBCDatabaseMetaData getMetaData() throws SQLException;

    @Override
    JDBCStatement createStatement() throws SQLException;

    @Override
    JDBCStatement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql) throws SQLException;

    @Override
    JDBCCallableStatement prepareCall(String sql) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException;

    @Override
    JDBCCallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException;

    @Override
    JDBCCallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException;

    @Override
    JDBCPreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException;

    @Override
    void close();
}
JDBC 1.7 compatible fix Former-commit-id: 5a2201bbd158158d31864ee8a63a6eba8271a8be
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/model/exec/jdbc/JDBCExecutionContext.java
JDBC 1.7 compatible fix
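For this record, the diff between old_contents and new_contents is the addition of covariant declarations for the java.sql.Connection methods introduced with JDBC 4.1 in Java 7 — getSchema, setSchema, abort, setNetworkTimeout and getNetworkTimeout — together with the java.util.concurrent.Executor import they require; "JDBC 1.7" in the subject evidently refers to Java 1.7. Because JDBCExecutionContext extends Connection, implementations would not compile on JDK 7 until these methods are declared. A minimal sketch of an implementation satisfying the new methods by delegation; the class and field names here are hypothetical, not DBeaver's:

import java.sql.Connection;
import java.sql.SQLException;
import java.util.concurrent.Executor;

// Hypothetical fragment: forward the JDBC 4.1 methods to the wrapped connection.
public abstract class DelegatingJDBCContext implements JDBCExecutionContext {

    private final Connection original; // the underlying driver connection

    protected DelegatingJDBCContext(Connection original) {
        this.original = original;
    }

    @Override
    public Connection getOriginal() {
        return original;
    }

    @Override
    public String getSchema() throws SQLException {
        return original.getSchema();
    }

    @Override
    public void setSchema(String schema) throws SQLException {
        original.setSchema(schema);
    }

    @Override
    public void abort(Executor executor) throws SQLException {
        original.abort(executor);
    }

    @Override
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        original.setNetworkTimeout(executor, milliseconds);
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        return original.getNetworkTimeout();
    }
}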
Java
apache-2.0
58f2ceca12e9bdc40735142ddf8fa6def6f90e4d
0
alexkuk/elasticsearch,abibell/elasticsearch,knight1128/elasticsearch,kenshin233/elasticsearch,MichaelLiZhou/elasticsearch,rhoml/elasticsearch,Charlesdong/elasticsearch,ulkas/elasticsearch,mcku/elasticsearch,pritishppai/elasticsearch,huanzhong/elasticsearch,hydro2k/elasticsearch,karthikjaps/elasticsearch,tsohil/elasticsearch,Brijeshrpatel9/elasticsearch,polyfractal/elasticsearch,markwalkom/elasticsearch,sreeramjayan/elasticsearch,JackyMai/elasticsearch,spiegela/elasticsearch,cnfire/elasticsearch-1,overcome/elasticsearch,ESamir/elasticsearch,awislowski/elasticsearch,cwurm/elasticsearch,MisterAndersen/elasticsearch,wbowling/elasticsearch,tebriel/elasticsearch,Liziyao/elasticsearch,sposam/elasticsearch,Collaborne/elasticsearch,wbowling/elasticsearch,linglaiyao1314/elasticsearch,yongminxia/elasticsearch,djschny/elasticsearch,Shekharrajak/elasticsearch,Siddartha07/elasticsearch,ricardocerq/elasticsearch,geidies/elasticsearch,koxa29/elasticsearch,IanvsPoplicola/elasticsearch,lightslife/elasticsearch,obourgain/elasticsearch,masterweb121/elasticsearch,socialrank/elasticsearch,tebriel/elasticsearch,markllama/elasticsearch,dataduke/elasticsearch,pritishppai/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,coding0011/elasticsearch,mmaracic/elasticsearch,huanzhong/elasticsearch,IanvsPoplicola/elasticsearch,karthikjaps/elasticsearch,zhiqinghuang/elasticsearch,markharwood/elasticsearch,yynil/elasticsearch,KimTaehee/elasticsearch,HonzaKral/elasticsearch,markwalkom/elasticsearch,andrejserafim/elasticsearch,rento19962/elasticsearch,davidvgalbraith/elasticsearch,jsgao0/elasticsearch,vvcephei/elasticsearch,ThiagoGarciaAlves/elasticsearch,yongminxia/elasticsearch,jeteve/elasticsearch,petabytedata/elasticsearch,NBSW/elasticsearch,pritishppai/elasticsearch,overcome/elasticsearch,fekaputra/elasticsearch,easonC/elasticsearch,jeteve/elasticsearch,Liziyao/elasticsearch,Uiho/elasticsearch,likaiwalkman/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,vietlq/elasticsearch,diendt/elasticsearch,henakamaMSFT/elasticsearch,fernandozhu/elasticsearch,martinstuga/elasticsearch,acchen97/elasticsearch,kimimj/elasticsearch,wangtuo/elasticsearch,jprante/elasticsearch,luiseduardohdbackup/elasticsearch,gfyoung/elasticsearch,apepper/elasticsearch,wittyameta/elasticsearch,MjAbuz/elasticsearch,nrkkalyan/elasticsearch,EasonYi/elasticsearch,pablocastro/elasticsearch,schonfeld/elasticsearch,cwurm/elasticsearch,golubev/elasticsearch,Uiho/elasticsearch,lmtwga/elasticsearch,ydsakyclguozi/elasticsearch,MjAbuz/elasticsearch,girirajsharma/elasticsearch,SergVro/elasticsearch,smflorentino/elasticsearch,jimhooker2002/elasticsearch,Shepard1212/elasticsearch,mgalushka/elasticsearch,palecur/elasticsearch,jimhooker2002/elasticsearch,kalimatas/elasticsearch,mjason3/elasticsearch,karthikjaps/elasticsearch,jchampion/elasticsearch,fekaputra/elasticsearch,GlenRSmith/elasticsearch,awislowski/elasticsearch,18098924759/elasticsearch,snikch/elasticsearch,beiske/elasticsearch,alexshadow007/elasticsearch,amit-shar/elasticsearch,jimhooker2002/elasticsearch,pranavraman/elasticsearch,mgalushka/elasticsearch,yongminxia/elasticsearch,Shekharrajak/elasticsearch,petabytedata/elasticsearch,kcompher/elasticsearch,lydonchandra/elasticsearch,njlawton/elasticsearch,himanshuag/elasticsearch,nilabhsagar/elasticsearch,snikch/elasticsearch,ThiagoGarciaAlves/elasticsearch,tkssharma/elasticsearch,wenpos/elasticsearch,schonfeld/elasticsearch,kcompher/elasticsearch,LeoYao/elasticsearch,s1monw/elasticsearch,Siddartha07/elasticsearch,
lydonchandra/elasticsearch,kcompher/elasticsearch,Kakakakakku/elasticsearch,socialrank/elasticsearch,dataduke/elasticsearch,dongjoon-hyun/elasticsearch,kubum/elasticsearch,mortonsykes/elasticsearch,trangvh/elasticsearch,C-Bish/elasticsearch,beiske/elasticsearch,chirilo/elasticsearch,SergVro/elasticsearch,Charlesdong/elasticsearch,Shekharrajak/elasticsearch,ESamir/elasticsearch,njlawton/elasticsearch,zeroctu/elasticsearch,overcome/elasticsearch,yanjunh/elasticsearch,pozhidaevak/elasticsearch,fred84/elasticsearch,18098924759/elasticsearch,wenpos/elasticsearch,iacdingping/elasticsearch,nazarewk/elasticsearch,ydsakyclguozi/elasticsearch,robin13/elasticsearch,mcku/elasticsearch,fforbeck/elasticsearch,jango2015/elasticsearch,adrianbk/elasticsearch,JervyShi/elasticsearch,martinstuga/elasticsearch,masterweb121/elasticsearch,humandb/elasticsearch,alexbrasetvik/elasticsearch,ivansun1010/elasticsearch,karthikjaps/elasticsearch,infusionsoft/elasticsearch,ckclark/elasticsearch,EasonYi/elasticsearch,camilojd/elasticsearch,Shekharrajak/elasticsearch,gingerwizard/elasticsearch,cnfire/elasticsearch-1,mnylen/elasticsearch,LeoYao/elasticsearch,Flipkart/elasticsearch,henakamaMSFT/elasticsearch,episerver/elasticsearch,mute/elasticsearch,ImpressTV/elasticsearch,umeshdangat/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,Fsero/elasticsearch,fred84/elasticsearch,AshishThakur/elasticsearch,tsohil/elasticsearch,wbowling/elasticsearch,linglaiyao1314/elasticsearch,HarishAtGitHub/elasticsearch,cnfire/elasticsearch-1,TonyChai24/ESSource,naveenhooda2000/elasticsearch,rmuir/elasticsearch,nrkkalyan/elasticsearch,TonyChai24/ESSource,szroland/elasticsearch,bawse/elasticsearch,Charlesdong/elasticsearch,alexbrasetvik/elasticsearch,javachengwc/elasticsearch,lchennup/elasticsearch,tsohil/elasticsearch,luiseduardohdbackup/elasticsearch,vroyer/elassandra,dongjoon-hyun/elasticsearch,beiske/elasticsearch,vvcephei/elasticsearch,sposam/elasticsearch,mgalushka/elasticsearch,wayeast/elasticsearch,a2lin/elasticsearch,tkssharma/elasticsearch,huanzhong/elasticsearch,iantruslove/elasticsearch,awislowski/elasticsearch,nomoa/elasticsearch,slavau/elasticsearch,Flipkart/elasticsearch,bawse/elasticsearch,mrorii/elasticsearch,IanvsPoplicola/elasticsearch,huypx1292/elasticsearch,pablocastro/elasticsearch,khiraiwa/elasticsearch,MjAbuz/elasticsearch,kimimj/elasticsearch,schonfeld/elasticsearch,robin13/elasticsearch,slavau/elasticsearch,lmtwga/elasticsearch,hirdesh2008/elasticsearch,a2lin/elasticsearch,mjhennig/elasticsearch,wimvds/elasticsearch,kingaj/elasticsearch,vrkansagara/elasticsearch,huypx1292/elasticsearch,artnowo/elasticsearch,StefanGor/elasticsearch,i-am-Nathan/elasticsearch,shreejay/elasticsearch,ThiagoGarciaAlves/elasticsearch,robin13/elasticsearch,jpountz/elasticsearch,ckclark/elasticsearch,artnowo/elasticsearch,vrkansagara/elasticsearch,pranavraman/elasticsearch,szroland/elasticsearch,infusionsoft/elasticsearch,shreejay/elasticsearch,slavau/elasticsearch,mm0/elasticsearch,AshishThakur/elasticsearch,kevinkluge/elasticsearch,hirdesh2008/elasticsearch,StefanGor/elasticsearch,sneivandt/elasticsearch,kaneshin/elasticsearch,lmtwga/elasticsearch,mm0/elasticsearch,Uiho/elasticsearch,xingguang2013/elasticsearch,jchampion/elasticsearch,wittyameta/elasticsearch,davidvgalbraith/elasticsearch,sneivandt/elasticsearch,LeoYao/elasticsearch,elancom/elasticsearch,xingguang2013/elasticsearch,jbertouch/elasticsearch,nrkkalyan/elasticsearch,MetSystem/elasticsearch,nrkkalyan/elasticsearch,yuy168/elasticsearch,KimTaehee/elasticsearch,markharwood/elasticse
arch,JervyShi/elasticsearch,mjason3/elasticsearch,thecocce/elasticsearch,btiernay/elasticsearch,NBSW/elasticsearch,amaliujia/elasticsearch,kalimatas/elasticsearch,Rygbee/elasticsearch,Helen-Zhao/elasticsearch,LewayneNaidoo/elasticsearch,tkssharma/elasticsearch,markwalkom/elasticsearch,pritishppai/elasticsearch,andrestc/elasticsearch,jaynblue/elasticsearch,masaruh/elasticsearch,luiseduardohdbackup/elasticsearch,ImpressTV/elasticsearch,xuzha/elasticsearch,Brijeshrpatel9/elasticsearch,adrianbk/elasticsearch,lightslife/elasticsearch,Rygbee/elasticsearch,ImpressTV/elasticsearch,ESamir/elasticsearch,Helen-Zhao/elasticsearch,knight1128/elasticsearch,Chhunlong/elasticsearch,avikurapati/elasticsearch,rlugojr/elasticsearch,mcku/elasticsearch,mgalushka/elasticsearch,huanzhong/elasticsearch,ulkas/elasticsearch,thecocce/elasticsearch,LeoYao/elasticsearch,JSCooke/elasticsearch,nazarewk/elasticsearch,fernandozhu/elasticsearch,yuy168/elasticsearch,clintongormley/elasticsearch,fernandozhu/elasticsearch,jbertouch/elasticsearch,pozhidaevak/elasticsearch,jaynblue/elasticsearch,PhaedrusTheGreek/elasticsearch,mkis-/elasticsearch,hanswang/elasticsearch,jaynblue/elasticsearch,brandonkearby/elasticsearch,Collaborne/elasticsearch,shreejay/elasticsearch,chirilo/elasticsearch,MetSystem/elasticsearch,aglne/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,iantruslove/elasticsearch,maddin2016/elasticsearch,kingaj/elasticsearch,huypx1292/elasticsearch,slavau/elasticsearch,rlugojr/elasticsearch,iantruslove/elasticsearch,Collaborne/elasticsearch,infusionsoft/elasticsearch,umeshdangat/elasticsearch,yynil/elasticsearch,rento19962/elasticsearch,Shekharrajak/elasticsearch,mcku/elasticsearch,kimimj/elasticsearch,markharwood/elasticsearch,hydro2k/elasticsearch,mohit/elasticsearch,diendt/elasticsearch,spiegela/elasticsearch,coding0011/elasticsearch,Stacey-Gammon/elasticsearch,knight1128/elasticsearch,strapdata/elassandra-test,LewayneNaidoo/elasticsearch,maddin2016/elasticsearch,Helen-Zhao/elasticsearch,khiraiwa/elasticsearch,lmtwga/elasticsearch,jchampion/elasticsearch,wuranbo/elasticsearch,fernandozhu/elasticsearch,Rygbee/elasticsearch,kaneshin/elasticsearch,camilojd/elasticsearch,zhiqinghuang/elasticsearch,dylan8902/elasticsearch,Uiho/elasticsearch,truemped/elasticsearch,amit-shar/elasticsearch,alexshadow007/elasticsearch,sdauletau/elasticsearch,iacdingping/elasticsearch,cwurm/elasticsearch,Ansh90/elasticsearch,Kakakakakku/elasticsearch,himanshuag/elasticsearch,wimvds/elasticsearch,mnylen/elasticsearch,Liziyao/elasticsearch,clintongormley/elasticsearch,springning/elasticsearch,sneivandt/elasticsearch,LewayneNaidoo/elasticsearch,elancom/elasticsearch,btiernay/elasticsearch,shreejay/elasticsearch,wangtuo/elasticsearch,obourgain/elasticsearch,tkssharma/elasticsearch,ThalaivaStars/OrgRepo1,vingupta3/elasticsearch,AndreKR/elasticsearch,kevinkluge/elasticsearch,iantruslove/elasticsearch,jimhooker2002/elasticsearch,wangtuo/elasticsearch,lmtwga/elasticsearch,scorpionvicky/elasticsearch,sauravmondallive/elasticsearch,Collaborne/elasticsearch,Flipkart/elasticsearch,qwerty4030/elasticsearch,fekaputra/elasticsearch,humandb/elasticsearch,hirdesh2008/elasticsearch,rento19962/elasticsearch,Helen-Zhao/elasticsearch,gingerwizard/elasticsearch,hirdesh2008/elasticsearch,palecur/elasticsearch,infusionsoft/elasticsearch,gingerwizard/elasticsearch,lydonchandra/elasticsearch,easonC/elasticsearch,kubum/elasticsearch,chirilo/elasticsearch,MisterAndersen/elasticsearch,franklanganke/elasticsearch,drewr/elasticsearch,andrejserafim/elasticsearch,wimvds/el
asticsearch,HarishAtGitHub/elasticsearch,mbrukman/elasticsearch,sauravmondallive/elasticsearch,skearns64/elasticsearch,mcku/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,chirilo/elasticsearch,18098924759/elasticsearch,wimvds/elasticsearch,acchen97/elasticsearch,yongminxia/elasticsearch,jbertouch/elasticsearch,sreeramjayan/elasticsearch,rajanm/elasticsearch,girirajsharma/elasticsearch,nomoa/elasticsearch,lks21c/elasticsearch,LewayneNaidoo/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,hechunwen/elasticsearch,MaineC/elasticsearch,mrorii/elasticsearch,markwalkom/elasticsearch,artnowo/elasticsearch,pranavraman/elasticsearch,lightslife/elasticsearch,Siddartha07/elasticsearch,Stacey-Gammon/elasticsearch,iamjakob/elasticsearch,vingupta3/elasticsearch,ulkas/elasticsearch,camilojd/elasticsearch,mikemccand/elasticsearch,ZTE-PaaS/elasticsearch,xpandan/elasticsearch,truemped/elasticsearch,ydsakyclguozi/elasticsearch,franklanganke/elasticsearch,wangyuxue/elasticsearch,ricardocerq/elasticsearch,apepper/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,onegambler/elasticsearch,LeoYao/elasticsearch,episerver/elasticsearch,pritishppai/elasticsearch,truemped/elasticsearch,adrianbk/elasticsearch,lightslife/elasticsearch,bestwpw/elasticsearch,hirdesh2008/elasticsearch,markharwood/elasticsearch,markllama/elasticsearch,kubum/elasticsearch,YosuaMichael/elasticsearch,clintongormley/elasticsearch,truemped/elasticsearch,jsgao0/elasticsearch,tebriel/elasticsearch,mjhennig/elasticsearch,schonfeld/elasticsearch,naveenhooda2000/elasticsearch,davidvgalbraith/elasticsearch,JackyMai/elasticsearch,thecocce/elasticsearch,jchampion/elasticsearch,strapdata/elassandra,wangyuxue/elasticsearch,mjason3/elasticsearch,rento19962/elasticsearch,MetSystem/elasticsearch,mcku/elasticsearch,Ansh90/elasticsearch,JSCooke/elasticsearch,JervyShi/elasticsearch,JackyMai/elasticsearch,sposam/elasticsearch,njlawton/elasticsearch,franklanganke/elasticsearch,AndreKR/elasticsearch,gingerwizard/elasticsearch,HonzaKral/elasticsearch,clintongormley/elasticsearch,ImpressTV/elasticsearch,amit-shar/elasticsearch,NBSW/elasticsearch,alexbrasetvik/elasticsearch,hafkensite/elasticsearch,sc0ttkclark/elasticsearch,areek/elasticsearch,rhoml/elasticsearch,AshishThakur/elasticsearch,infusionsoft/elasticsearch,sdauletau/elasticsearch,aglne/elasticsearch,robin13/elasticsearch,pranavraman/elasticsearch,onegambler/elasticsearch,ouyangkongtong/elasticsearch,koxa29/elasticsearch,dongjoon-hyun/elasticsearch,vrkansagara/elasticsearch,kenshin233/elasticsearch,nomoa/elasticsearch,areek/elasticsearch,scottsom/elasticsearch,jchampion/elasticsearch,springning/elasticsearch,spiegela/elasticsearch,ZTE-PaaS/elasticsearch,rajanm/elasticsearch,diendt/elasticsearch,gfyoung/elasticsearch,knight1128/elasticsearch,18098924759/elasticsearch,MjAbuz/elasticsearch,coding0011/elasticsearch,drewr/elasticsearch,mbrukman/elasticsearch,caengcjd/elasticsearch,overcome/elasticsearch,schonfeld/elasticsearch,wittyameta/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,jeteve/elasticsearch,jpountz/elasticsearch,alexkuk/elasticsearch,awislowski/elasticsearch,kingaj/elasticsearch,masterweb121/elasticsearch,ouyangkongtong/elasticsearch,Kakakakakku/elasticsearch,mute/elasticsearch,strapdata/elassandra,andrejserafim/elasticsearch,lzo/elasticsearch-1,ESamir/elasticsearch,YosuaMichael/elasticsearch,PhaedrusTheGreek/elasticsearch,wuranbo/elasticsearch,xuzha/elasticsearch,kevinkluge/elasticsearch,queirozfcom/elasticsearch,Widen/elasticsearch,xpandan/elasticsearch,lzo/elasticsearch-1,zhiqinghuang/e
lasticsearch,springning/elasticsearch,tebriel/elasticsearch,vrkansagara/elasticsearch,obourgain/elasticsearch,cwurm/elasticsearch,Ansh90/elasticsearch,scottsom/elasticsearch,karthikjaps/elasticsearch,abibell/elasticsearch,Collaborne/elasticsearch,MisterAndersen/elasticsearch,mjhennig/elasticsearch,ricardocerq/elasticsearch,thecocce/elasticsearch,IanvsPoplicola/elasticsearch,likaiwalkman/elasticsearch,awislowski/elasticsearch,mrorii/elasticsearch,TonyChai24/ESSource,likaiwalkman/elasticsearch,dpursehouse/elasticsearch,gmarz/elasticsearch,mbrukman/elasticsearch,hirdesh2008/elasticsearch,aglne/elasticsearch,scottsom/elasticsearch,petabytedata/elasticsearch,Rygbee/elasticsearch,Chhunlong/elasticsearch,yynil/elasticsearch,ThalaivaStars/OrgRepo1,NBSW/elasticsearch,geidies/elasticsearch,kalburgimanjunath/elasticsearch,koxa29/elasticsearch,sarwarbhuiyan/elasticsearch,nilabhsagar/elasticsearch,kenshin233/elasticsearch,jango2015/elasticsearch,Siddartha07/elasticsearch,pablocastro/elasticsearch,lchennup/elasticsearch,mbrukman/elasticsearch,ouyangkongtong/elasticsearch,mgalushka/elasticsearch,wangtuo/elasticsearch,i-am-Nathan/elasticsearch,kalburgimanjunath/elasticsearch,hydro2k/elasticsearch,avikurapati/elasticsearch,strapdata/elassandra-test,hirdesh2008/elasticsearch,huanzhong/elasticsearch,hechunwen/elasticsearch,IanvsPoplicola/elasticsearch,TonyChai24/ESSource,F0lha/elasticsearch,mikemccand/elasticsearch,cnfire/elasticsearch-1,sposam/elasticsearch,vingupta3/elasticsearch,nknize/elasticsearch,achow/elasticsearch,weipinghe/elasticsearch,Fsero/elasticsearch,jimczi/elasticsearch,phani546/elasticsearch,ckclark/elasticsearch,phani546/elasticsearch,mohit/elasticsearch,sauravmondallive/elasticsearch,dylan8902/elasticsearch,onegambler/elasticsearch,sneivandt/elasticsearch,jprante/elasticsearch,beiske/elasticsearch,C-Bish/elasticsearch,tkssharma/elasticsearch,AshishThakur/elasticsearch,markllama/elasticsearch,andrestc/elasticsearch,loconsolutions/elasticsearch,acchen97/elasticsearch,geidies/elasticsearch,dpursehouse/elasticsearch,cnfire/elasticsearch-1,polyfractal/elasticsearch,onegambler/elasticsearch,umeshdangat/elasticsearch,knight1128/elasticsearch,Siddartha07/elasticsearch,jw0201/elastic,truemped/elasticsearch,amaliujia/elasticsearch,ouyangkongtong/elasticsearch,AndreKR/elasticsearch,sdauletau/elasticsearch,bawse/elasticsearch,PhaedrusTheGreek/elasticsearch,abibell/elasticsearch,wbowling/elasticsearch,jsgao0/elasticsearch,a2lin/elasticsearch,himanshuag/elasticsearch,jimhooker2002/elasticsearch,Fsero/elasticsearch,martinstuga/elasticsearch,chirilo/elasticsearch,kingaj/elasticsearch,petabytedata/elasticsearch,rhoml/elasticsearch,Uiho/elasticsearch,C-Bish/elasticsearch,kenshin233/elasticsearch,nazarewk/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,luiseduardohdbackup/elasticsearch,markharwood/elasticsearch,JSCooke/elasticsearch,scorpionvicky/elasticsearch,nilabhsagar/elasticsearch,Ansh90/elasticsearch,HonzaKral/elasticsearch,KimTaehee/elasticsearch,hanswang/elasticsearch,hafkensite/elasticsearch,strapdata/elassandra,fooljohnny/elasticsearch,henakamaMSFT/elasticsearch,tkssharma/elasticsearch,dylan8902/elasticsearch,amit-shar/elasticsearch,milodky/elasticsearch,lzo/elasticsearch-1,KimTaehee/elasticsearch,abibell/elasticsearch,adrianbk/elasticsearch,zkidkid/elasticsearch,alexshadow007/elasticsearch,sposam/elasticsearch,maddin2016/elasticsearch,jimczi/elasticsearch,alexbrasetvik/elasticsearch,tahaemin/elasticsearch,nellicus/elasticsearch,EasonYi/elasticsearch,gingerwizard/elasticsearch,achow/elasticsearch
,Ansh90/elasticsearch,mapr/elasticsearch,elancom/elasticsearch,caengcjd/elasticsearch,kalburgimanjunath/elasticsearch,achow/elasticsearch,bestwpw/elasticsearch,dongjoon-hyun/elasticsearch,pablocastro/elasticsearch,amaliujia/elasticsearch,geidies/elasticsearch,HarishAtGitHub/elasticsearch,girirajsharma/elasticsearch,linglaiyao1314/elasticsearch,mbrukman/elasticsearch,easonC/elasticsearch,MjAbuz/elasticsearch,slavau/elasticsearch,wayeast/elasticsearch,Brijeshrpatel9/elasticsearch,mnylen/elasticsearch,mapr/elasticsearch,rajanm/elasticsearch,geidies/elasticsearch,episerver/elasticsearch,ivansun1010/elasticsearch,amit-shar/elasticsearch,kalburgimanjunath/elasticsearch,ulkas/elasticsearch,nezirus/elasticsearch,nazarewk/elasticsearch,jaynblue/elasticsearch,kevinkluge/elasticsearch,hanswang/elasticsearch,Brijeshrpatel9/elasticsearch,golubev/elasticsearch,uschindler/elasticsearch,qwerty4030/elasticsearch,HarishAtGitHub/elasticsearch,Uiho/elasticsearch,tahaemin/elasticsearch,MaineC/elasticsearch,zeroctu/elasticsearch,vingupta3/elasticsearch,kaneshin/elasticsearch,lchennup/elasticsearch,hechunwen/elasticsearch,zhiqinghuang/elasticsearch,acchen97/elasticsearch,sreeramjayan/elasticsearch,zkidkid/elasticsearch,kalburgimanjunath/elasticsearch,tahaemin/elasticsearch,camilojd/elasticsearch,areek/elasticsearch,Widen/elasticsearch,pozhidaevak/elasticsearch,zhiqinghuang/elasticsearch,rmuir/elasticsearch,koxa29/elasticsearch,Flipkart/elasticsearch,humandb/elasticsearch,mmaracic/elasticsearch,StefanGor/elasticsearch,fooljohnny/elasticsearch,vingupta3/elasticsearch,AndreKR/elasticsearch,lightslife/elasticsearch,franklanganke/elasticsearch,onegambler/elasticsearch,ThiagoGarciaAlves/elasticsearch,Collaborne/elasticsearch,zeroctu/elasticsearch,schonfeld/elasticsearch,i-am-Nathan/elasticsearch,easonC/elasticsearch,gmarz/elasticsearch,strapdata/elassandra5-rc,areek/elasticsearch,huanzhong/elasticsearch,elasticdog/elasticsearch,acchen97/elasticsearch,liweinan0423/elasticsearch,sauravmondallive/elasticsearch,skearns64/elasticsearch,elancom/elasticsearch,mkis-/elasticsearch,zkidkid/elasticsearch,mute/elasticsearch,sc0ttkclark/elasticsearch,truemped/elasticsearch,Shepard1212/elasticsearch,sdauletau/elasticsearch,girirajsharma/elasticsearch,djschny/elasticsearch,winstonewert/elasticsearch,masterweb121/elasticsearch,mmaracic/elasticsearch,iacdingping/elasticsearch,naveenhooda2000/elasticsearch,nellicus/elasticsearch,liweinan0423/elasticsearch,ckclark/elasticsearch,snikch/elasticsearch,Kakakakakku/elasticsearch,vvcephei/elasticsearch,Shepard1212/elasticsearch,mikemccand/elasticsearch,kaneshin/elasticsearch,schonfeld/elasticsearch,jeteve/elasticsearch,fred84/elasticsearch,andrestc/elasticsearch,fooljohnny/elasticsearch,vrkansagara/elasticsearch,mm0/elasticsearch,ricardocerq/elasticsearch,dataduke/elasticsearch,myelin/elasticsearch,himanshuag/elasticsearch,Liziyao/elasticsearch,djschny/elasticsearch,AshishThakur/elasticsearch,wenpos/elasticsearch,mute/elasticsearch,mkis-/elasticsearch,Charlesdong/elasticsearch,yanjunh/elasticsearch,MichaelLiZhou/elasticsearch,ThiagoGarciaAlves/elasticsearch,MaineC/elasticsearch,vingupta3/elasticsearch,zhiqinghuang/elasticsearch,wenpos/elasticsearch,kubum/elasticsearch,18098924759/elasticsearch,ulkas/elasticsearch,likaiwalkman/elasticsearch,alexkuk/elasticsearch,hechunwen/elasticsearch,shreejay/elasticsearch,phani546/elasticsearch,mmaracic/elasticsearch,robin13/elasticsearch,zkidkid/elasticsearch,nellicus/elasticsearch,MichaelLiZhou/elasticsearch,huypx1292/elasticsearch,weipinghe/elasticsearch,
loconsolutions/elasticsearch,lzo/elasticsearch-1,scorpionvicky/elasticsearch,naveenhooda2000/elasticsearch,YosuaMichael/elasticsearch,smflorentino/elasticsearch,achow/elasticsearch,polyfractal/elasticsearch,YosuaMichael/elasticsearch,MetSystem/elasticsearch,sarwarbhuiyan/elasticsearch,kalimatas/elasticsearch,Shepard1212/elasticsearch,naveenhooda2000/elasticsearch,ckclark/elasticsearch,zeroctu/elasticsearch,knight1128/elasticsearch,rento19962/elasticsearch,strapdata/elassandra5-rc,mute/elasticsearch,dataduke/elasticsearch,kaneshin/elasticsearch,queirozfcom/elasticsearch,trangvh/elasticsearch,kubum/elasticsearch,fooljohnny/elasticsearch,martinstuga/elasticsearch,golubev/elasticsearch,smflorentino/elasticsearch,brandonkearby/elasticsearch,likaiwalkman/elasticsearch,socialrank/elasticsearch,Liziyao/elasticsearch,brandonkearby/elasticsearch,amaliujia/elasticsearch,yuy168/elasticsearch,alexshadow007/elasticsearch,mkis-/elasticsearch,mjason3/elasticsearch,fred84/elasticsearch,Widen/elasticsearch,masaruh/elasticsearch,hanswang/elasticsearch,aglne/elasticsearch,mnylen/elasticsearch,iacdingping/elasticsearch,milodky/elasticsearch,nknize/elasticsearch,kaneshin/elasticsearch,likaiwalkman/elasticsearch,jsgao0/elasticsearch,mortonsykes/elasticsearch,dylan8902/elasticsearch,dataduke/elasticsearch,nilabhsagar/elasticsearch,nellicus/elasticsearch,nrkkalyan/elasticsearch,Brijeshrpatel9/elasticsearch,ulkas/elasticsearch,masaruh/elasticsearch,obourgain/elasticsearch,JervyShi/elasticsearch,drewr/elasticsearch,xingguang2013/elasticsearch,apepper/elasticsearch,queirozfcom/elasticsearch,sc0ttkclark/elasticsearch,mohit/elasticsearch,tsohil/elasticsearch,beiske/elasticsearch,andrestc/elasticsearch,luiseduardohdbackup/elasticsearch,jimhooker2002/elasticsearch,davidvgalbraith/elasticsearch,ydsakyclguozi/elasticsearch,winstonewert/elasticsearch,wuranbo/elasticsearch,Rygbee/elasticsearch,hanswang/elasticsearch,xingguang2013/elasticsearch,smflorentino/elasticsearch,hafkensite/elasticsearch,slavau/elasticsearch,loconsolutions/elasticsearch,Liziyao/elasticsearch,amaliujia/elasticsearch,phani546/elasticsearch,MetSystem/elasticsearch,drewr/elasticsearch,andrestc/elasticsearch,rajanm/elasticsearch,fekaputra/elasticsearch,lzo/elasticsearch-1,Chhunlong/elasticsearch,ThalaivaStars/OrgRepo1,vingupta3/elasticsearch,pranavraman/elasticsearch,wittyameta/elasticsearch,andrejserafim/elasticsearch,yanjunh/elasticsearch,LeoYao/elasticsearch,Shepard1212/elasticsearch,dpursehouse/elasticsearch,iamjakob/elasticsearch,AndreKR/elasticsearch,kingaj/elasticsearch,himanshuag/elasticsearch,kcompher/elasticsearch,jprante/elasticsearch,vroyer/elasticassandra,loconsolutions/elasticsearch,vietlq/elasticsearch,abibell/elasticsearch,Stacey-Gammon/elasticsearch,mrorii/elasticsearch,LewayneNaidoo/elasticsearch,sarwarbhuiyan/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,socialrank/elasticsearch,franklanganke/elasticsearch,xpandan/elasticsearch,fforbeck/elasticsearch,onegambler/elasticsearch,mrorii/elasticsearch,strapdata/elassandra-test,pritishppai/elasticsearch,s1monw/elasticsearch,iamjakob/elasticsearch,zeroctu/elasticsearch,lydonchandra/elasticsearch,ZTE-PaaS/elasticsearch,lightslife/elasticsearch,lchennup/elasticsearch,rmuir/elasticsearch,Stacey-Gammon/elasticsearch,szroland/elasticsearch,aglne/elasticsearch,lchennup/elasticsearch,strapdata/elassandra-test,avikurapati/elasticsearch,vietlq/elasticsearch,thecocce/elasticsearch,slavau/elasticsearch,sarwarbhuiyan/elasticsearch,polyfractal/elasticsearch,sdauletau/elasticsearch,winstonewert/elasticsea
rch,JackyMai/elasticsearch,jprante/elasticsearch,strapdata/elassandra5-rc,coding0011/elasticsearch,diendt/elasticsearch,elasticdog/elasticsearch,amit-shar/elasticsearch,ouyangkongtong/elasticsearch,tebriel/elasticsearch,PhaedrusTheGreek/elasticsearch,bestwpw/elasticsearch,winstonewert/elasticsearch,kunallimaye/elasticsearch,coding0011/elasticsearch,djschny/elasticsearch,rajanm/elasticsearch,pablocastro/elasticsearch,knight1128/elasticsearch,zeroctu/elasticsearch,mapr/elasticsearch,springning/elasticsearch,jw0201/elastic,lks21c/elasticsearch,jimhooker2002/elasticsearch,jw0201/elastic,martinstuga/elasticsearch,kimimj/elasticsearch,jpountz/elasticsearch,jbertouch/elasticsearch,fred84/elasticsearch,cnfire/elasticsearch-1,skearns64/elasticsearch,tahaemin/elasticsearch,spiegela/elasticsearch,ImpressTV/elasticsearch,Rygbee/elasticsearch,Shekharrajak/elasticsearch,kingaj/elasticsearch,jango2015/elasticsearch,TonyChai24/ESSource,ivansun1010/elasticsearch,lydonchandra/elasticsearch,lydonchandra/elasticsearch,djschny/elasticsearch,lks21c/elasticsearch,bestwpw/elasticsearch,HarishAtGitHub/elasticsearch,nrkkalyan/elasticsearch,jimczi/elasticsearch,yynil/elasticsearch,maddin2016/elasticsearch,Rygbee/elasticsearch,EasonYi/elasticsearch,Kakakakakku/elasticsearch,lmtwga/elasticsearch,kenshin233/elasticsearch,geidies/elasticsearch,achow/elasticsearch,andrejserafim/elasticsearch,ESamir/elasticsearch,dpursehouse/elasticsearch,onegambler/elasticsearch,rlugojr/elasticsearch,Brijeshrpatel9/elasticsearch,maddin2016/elasticsearch,GlenRSmith/elasticsearch,Flipkart/elasticsearch,jeteve/elasticsearch,LeoYao/elasticsearch,mm0/elasticsearch,brandonkearby/elasticsearch,diendt/elasticsearch,sarwarbhuiyan/elasticsearch,franklanganke/elasticsearch,mkis-/elasticsearch,Fsero/elasticsearch,a2lin/elasticsearch,jimczi/elasticsearch,i-am-Nathan/elasticsearch,a2lin/elasticsearch,apepper/elasticsearch,lchennup/elasticsearch,xingguang2013/elasticsearch,nilabhsagar/elasticsearch,MetSystem/elasticsearch,rmuir/elasticsearch,luiseduardohdbackup/elasticsearch,vvcephei/elasticsearch,linglaiyao1314/elasticsearch,Widen/elasticsearch,truemped/elasticsearch,sc0ttkclark/elasticsearch,areek/elasticsearch,Liziyao/elasticsearch,huypx1292/elasticsearch,F0lha/elasticsearch,rhoml/elasticsearch,snikch/elasticsearch,rlugojr/elasticsearch,gfyoung/elasticsearch,JervyShi/elasticsearch,girirajsharma/elasticsearch,zhiqinghuang/elasticsearch,kalburgimanjunath/elasticsearch,franklanganke/elasticsearch,ESamir/elasticsearch,weipinghe/elasticsearch,nezirus/elasticsearch,strapdata/elassandra-test,s1monw/elasticsearch,HarishAtGitHub/elasticsearch,btiernay/elasticsearch,weipinghe/elasticsearch,mortonsykes/elasticsearch,bawse/elasticsearch,strapdata/elassandra-test,glefloch/elasticsearch,MaineC/elasticsearch,golubev/elasticsearch,alexshadow007/elasticsearch,yuy168/elasticsearch,SergVro/elasticsearch,nezirus/elasticsearch,kimimj/elasticsearch,GlenRSmith/elasticsearch,artnowo/elasticsearch,mortonsykes/elasticsearch,himanshuag/elasticsearch,myelin/elasticsearch,umeshdangat/elasticsearch,SergVro/elasticsearch,fforbeck/elasticsearch,khiraiwa/elasticsearch,sposam/elasticsearch,ThalaivaStars/OrgRepo1,Chhunlong/elasticsearch,F0lha/elasticsearch,mjhennig/elasticsearch,kevinkluge/elasticsearch,YosuaMichael/elasticsearch,milodky/elasticsearch,nknize/elasticsearch,kevinkluge/elasticsearch,snikch/elasticsearch,davidvgalbraith/elasticsearch,gmarz/elasticsearch,snikch/elasticsearch,ZTE-PaaS/elasticsearch,xpandan/elasticsearch,lzo/elasticsearch-1,pozhidaevak/elasticsearch,masaruh/
elasticsearch,yanjunh/elasticsearch,queirozfcom/elasticsearch,kunallimaye/elasticsearch,rento19962/elasticsearch,yongminxia/elasticsearch,weipinghe/elasticsearch,rmuir/elasticsearch,TonyChai24/ESSource,artnowo/elasticsearch,lydonchandra/elasticsearch,khiraiwa/elasticsearch,jaynblue/elasticsearch,rhoml/elasticsearch,fekaputra/elasticsearch,hanswang/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,palecur/elasticsearch,Siddartha07/elasticsearch,Shekharrajak/elasticsearch,gfyoung/elasticsearch,loconsolutions/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,lks21c/elasticsearch,koxa29/elasticsearch,acchen97/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,kalimatas/elasticsearch,ouyangkongtong/elasticsearch,qwerty4030/elasticsearch,episerver/elasticsearch,vietlq/elasticsearch,golubev/elasticsearch,Ansh90/elasticsearch,jpountz/elasticsearch,MichaelLiZhou/elasticsearch,glefloch/elasticsearch,s1monw/elasticsearch,henakamaMSFT/elasticsearch,masterweb121/elasticsearch,uschindler/elasticsearch,sposam/elasticsearch,palecur/elasticsearch,vvcephei/elasticsearch,thecocce/elasticsearch,glefloch/elasticsearch,kunallimaye/elasticsearch,liweinan0423/elasticsearch,hydro2k/elasticsearch,mapr/elasticsearch,rajanm/elasticsearch,sreeramjayan/elasticsearch,infusionsoft/elasticsearch,easonC/elasticsearch,vvcephei/elasticsearch,clintongormley/elasticsearch,strapdata/elassandra,jsgao0/elasticsearch,MichaelLiZhou/elasticsearch,ouyangkongtong/elasticsearch,nezirus/elasticsearch,trangvh/elasticsearch,markllama/elasticsearch,elancom/elasticsearch,socialrank/elasticsearch,sc0ttkclark/elasticsearch,Chhunlong/elasticsearch,Charlesdong/elasticsearch,hydro2k/elasticsearch,cwurm/elasticsearch,MichaelLiZhou/elasticsearch,pozhidaevak/elasticsearch,markwalkom/elasticsearch,mjhennig/elasticsearch,weipinghe/elasticsearch,overcome/elasticsearch,hechunwen/elasticsearch,camilojd/elasticsearch,caengcjd/elasticsearch,yuy168/elasticsearch,likaiwalkman/elasticsearch,queirozfcom/elasticsearch,ThiagoGarciaAlves/elasticsearch,sarwarbhuiyan/elasticsearch,javachengwc/elasticsearch,szroland/elasticsearch,ricardocerq/elasticsearch,i-am-Nathan/elasticsearch,kimimj/elasticsearch,palecur/elasticsearch,milodky/elasticsearch,mapr/elasticsearch,jaynblue/elasticsearch,btiernay/elasticsearch,andrestc/elasticsearch,henakamaMSFT/elasticsearch,rlugojr/elasticsearch,MisterAndersen/elasticsearch,areek/elasticsearch,dylan8902/elasticsearch,adrianbk/elasticsearch,gfyoung/elasticsearch,btiernay/elasticsearch,MjAbuz/elasticsearch,koxa29/elasticsearch,vroyer/elassandra,brandonkearby/elasticsearch,pranavraman/elasticsearch,alexbrasetvik/elasticsearch,nknize/elasticsearch,Widen/elasticsearch,diendt/elasticsearch,uschindler/elasticsearch,ckclark/elasticsearch,apepper/elasticsearch,skearns64/elasticsearch,rhoml/elasticsearch,nellicus/elasticsearch,springning/elasticsearch,mcku/elasticsearch,fforbeck/elasticsearch,szroland/elasticsearch,strapdata/elassandra5-rc,rento19962/elasticsearch,lks21c/elasticsearch,jsgao0/elasticsearch,lightslife/elasticsearch,mute/elasticsearch,iamjakob/elasticsearch,wuranbo/elasticsearch,mapr/elasticsearch,gingerwizard/elasticsearch,clintongormley/elasticsearch,rmuir/elasticsearch,mjason3/elasticsearch,HonzaKral/elasticsearch,C-Bish/elasticsearch,iantruslove/elasticsearch,markwalkom/elasticsearch,kalburgimanjunath/elasticsearch,mbrukman/elasticsearch,ulkas/elasticsearch,queirozfcom/elasticsearch,nomoa/elasticsearch,xuzha/elasticsearch,strapdata/elassandra5-rc,adrianbk/elasticsearch,dylan8902/elasticsearch,mm0/elasticsearch,alexkuk/elas
ticsearch,sauravmondallive/elasticsearch,djschny/elasticsearch,btiernay/elasticsearch,AndreKR/elasticsearch,mm0/elasticsearch,lchennup/elasticsearch,EasonYi/elasticsearch,javachengwc/elasticsearch,mbrukman/elasticsearch,caengcjd/elasticsearch,socialrank/elasticsearch,mgalushka/elasticsearch,acchen97/elasticsearch,iantruslove/elasticsearch,camilojd/elasticsearch,nrkkalyan/elasticsearch,springning/elasticsearch,uschindler/elasticsearch,elasticdog/elasticsearch,gmarz/elasticsearch,wittyameta/elasticsearch,yanjunh/elasticsearch,vroyer/elasticassandra,skearns64/elasticsearch,luiseduardohdbackup/elasticsearch,elancom/elasticsearch,mkis-/elasticsearch,mikemccand/elasticsearch,sreeramjayan/elasticsearch,abibell/elasticsearch,xpandan/elasticsearch,mnylen/elasticsearch,wayeast/elasticsearch,wayeast/elasticsearch,amit-shar/elasticsearch,kalimatas/elasticsearch,fooljohnny/elasticsearch,sneivandt/elasticsearch,pablocastro/elasticsearch,kenshin233/elasticsearch,tkssharma/elasticsearch,queirozfcom/elasticsearch,iacdingping/elasticsearch,wenpos/elasticsearch,alexkuk/elasticsearch,njlawton/elasticsearch,sreeramjayan/elasticsearch,golubev/elasticsearch,adrianbk/elasticsearch,caengcjd/elasticsearch,martinstuga/elasticsearch,trangvh/elasticsearch,C-Bish/elasticsearch,kcompher/elasticsearch,Charlesdong/elasticsearch,xpandan/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,mohit/elasticsearch,YosuaMichael/elasticsearch,hafkensite/elasticsearch,vietlq/elasticsearch,markllama/elasticsearch,AshishThakur/elasticsearch,bestwpw/elasticsearch,chirilo/elasticsearch,Flipkart/elasticsearch,Helen-Zhao/elasticsearch,strapdata/elassandra,Collaborne/elasticsearch,bawse/elasticsearch,YosuaMichael/elasticsearch,ImpressTV/elasticsearch,MaineC/elasticsearch,humandb/elasticsearch,jpountz/elasticsearch,MjAbuz/elasticsearch,s1monw/elasticsearch,umeshdangat/elasticsearch,mikemccand/elasticsearch,trangvh/elasticsearch,yongminxia/elasticsearch,jango2015/elasticsearch,Charlesdong/elasticsearch,skearns64/elasticsearch,hydro2k/elasticsearch,Stacey-Gammon/elasticsearch,wbowling/elasticsearch,wuranbo/elasticsearch,winstonewert/elasticsearch,hechunwen/elasticsearch,javachengwc/elasticsearch,episerver/elasticsearch,kubum/elasticsearch,wayeast/elasticsearch,Widen/elasticsearch,tsohil/elasticsearch,jimczi/elasticsearch,fekaputra/elasticsearch,18098924759/elasticsearch,wimvds/elasticsearch,masterweb121/elasticsearch,avikurapati/elasticsearch,nellicus/elasticsearch,djschny/elasticsearch,NBSW/elasticsearch,tsohil/elasticsearch,ZTE-PaaS/elasticsearch,bestwpw/elasticsearch,JervyShi/elasticsearch,kingaj/elasticsearch,overcome/elasticsearch,StefanGor/elasticsearch,mgalushka/elasticsearch,kunallimaye/elasticsearch,javachengwc/elasticsearch,qwerty4030/elasticsearch,vrkansagara/elasticsearch,sarwarbhuiyan/elasticsearch,mjhennig/elasticsearch,18098924759/elasticsearch,hydro2k/elasticsearch,drewr/elasticsearch,jw0201/elastic,KimTaehee/elasticsearch,kubum/elasticsearch,elasticdog/elasticsearch,jchampion/elasticsearch,dataduke/elasticsearch,wbowling/elasticsearch,humandb/elasticsearch,mortonsykes/elasticsearch,JSCooke/elasticsearch,fernandozhu/elasticsearch,yuy168/elasticsearch,khiraiwa/elasticsearch,hafkensite/elasticsearch,xuzha/elasticsearch,SergVro/elasticsearch,KimTaehee/elasticsearch,aglne/elasticsearch,kcompher/elasticsearch,wbowling/elasticsearch,amaliujia/elasticsearch,huypx1292/elasticsearch,humandb/elasticsearch,sdauletau/elasticsearch,ThalaivaStars/OrgRepo1,vietlq/elasticsearch,tahaemin/elasticsearch,jbertouch/elasticsearch,elancom/elasticsearc
h,drewr/elasticsearch,nomoa/elasticsearch,milodky/elasticsearch,infusionsoft/elasticsearch,xuzha/elasticsearch,phani546/elasticsearch,polyfractal/elasticsearch,ivansun1010/elasticsearch,jprante/elasticsearch,lmtwga/elasticsearch,areek/elasticsearch,fekaputra/elasticsearch,ydsakyclguozi/elasticsearch,yynil/elasticsearch,linglaiyao1314/elasticsearch,nellicus/elasticsearch,sdauletau/elasticsearch,weipinghe/elasticsearch,milodky/elasticsearch,xingguang2013/elasticsearch,kcompher/elasticsearch,F0lha/elasticsearch,mrorii/elasticsearch,mmaracic/elasticsearch,beiske/elasticsearch,jango2015/elasticsearch,springning/elasticsearch,huanzhong/elasticsearch,myelin/elasticsearch,ivansun1010/elasticsearch,njlawton/elasticsearch,jango2015/elasticsearch,pritishppai/elasticsearch,zkidkid/elasticsearch,obourgain/elasticsearch,EasonYi/elasticsearch,MichaelLiZhou/elasticsearch,iacdingping/elasticsearch,ivansun1010/elasticsearch,Ansh90/elasticsearch,apepper/elasticsearch,nazarewk/elasticsearch,drewr/elasticsearch,yuy168/elasticsearch,alexkuk/elasticsearch,gmarz/elasticsearch,loconsolutions/elasticsearch,himanshuag/elasticsearch,vietlq/elasticsearch,dataduke/elasticsearch,EasonYi/elasticsearch,jw0201/elastic,vroyer/elasticassandra,hafkensite/elasticsearch,bestwpw/elasticsearch,Siddartha07/elasticsearch,mm0/elasticsearch,myelin/elasticsearch,nezirus/elasticsearch,kimimj/elasticsearch,tsohil/elasticsearch,liweinan0423/elasticsearch,apepper/elasticsearch,iamjakob/elasticsearch,sc0ttkclark/elasticsearch,vroyer/elassandra,wittyameta/elasticsearch,tahaemin/elasticsearch,NBSW/elasticsearch,iamjakob/elasticsearch,glefloch/elasticsearch,szroland/elasticsearch,masaruh/elasticsearch,phani546/elasticsearch,wittyameta/elasticsearch,qwerty4030/elasticsearch,hanswang/elasticsearch,Fsero/elasticsearch,SergVro/elasticsearch,TonyChai24/ESSource,javachengwc/elasticsearch,humandb/elasticsearch,Brijeshrpatel9/elasticsearch,Kakakakakku/elasticsearch,smflorentino/elasticsearch,PhaedrusTheGreek/elasticsearch,wayeast/elasticsearch,myelin/elasticsearch,Fsero/elasticsearch,dpursehouse/elasticsearch,sc0ttkclark/elasticsearch,jeteve/elasticsearch,khiraiwa/elasticsearch,dylan8902/elasticsearch,markllama/elasticsearch,tahaemin/elasticsearch,xingguang2013/elasticsearch,MisterAndersen/elasticsearch,MetSystem/elasticsearch,mnylen/elasticsearch,ydsakyclguozi/elasticsearch,andrestc/elasticsearch,scottsom/elasticsearch,petabytedata/elasticsearch,ThalaivaStars/OrgRepo1,iantruslove/elasticsearch,wangtuo/elasticsearch,kunallimaye/elasticsearch,F0lha/elasticsearch,achow/elasticsearch,kunallimaye/elasticsearch,polyfractal/elasticsearch,mjhennig/elasticsearch,Uiho/elasticsearch,caengcjd/elasticsearch,beiske/elasticsearch,JSCooke/elasticsearch,StefanGor/elasticsearch,KimTaehee/elasticsearch,F0lha/elasticsearch,petabytedata/elasticsearch,avikurapati/elasticsearch,Widen/elasticsearch,karthikjaps/elasticsearch,girirajsharma/elasticsearch,yynil/elasticsearch,mute/elasticsearch,socialrank/elasticsearch,dongjoon-hyun/elasticsearch,ckclark/elasticsearch,scottsom/elasticsearch,PhaedrusTheGreek/elasticsearch,tebriel/elasticsearch,abibell/elasticsearch,kevinkluge/elasticsearch,spiegela/elasticsearch,GlenRSmith/elasticsearch,andrejserafim/elasticsearch,cnfire/elasticsearch-1,Fsero/elasticsearch,strapdata/elassandra-test,markharwood/elasticsearch,linglaiyao1314/elasticsearch,lzo/elasticsearch-1,fforbeck/elasticsearch,davidvgalbraith/elasticsearch,jw0201/elastic,scorpionvicky/elasticsearch,elasticdog/elasticsearch,wayeast/elasticsearch,glefloch/elasticsearch,gingerwizar
d/elasticsearch,pranavraman/elasticsearch,btiernay/elasticsearch,linglaiyao1314/elasticsearch,uschindler/elasticsearch,liweinan0423/elasticsearch,jango2015/elasticsearch,jbertouch/elasticsearch,mnylen/elasticsearch,wangyuxue/elasticsearch,jpountz/elasticsearch,iamjakob/elasticsearch,iacdingping/elasticsearch,yongminxia/elasticsearch,ImpressTV/elasticsearch,scorpionvicky/elasticsearch,hafkensite/elasticsearch,markllama/elasticsearch,kenshin233/elasticsearch,masterweb121/elasticsearch,pablocastro/elasticsearch,mohit/elasticsearch,karthikjaps/elasticsearch,zeroctu/elasticsearch,jeteve/elasticsearch,wimvds/elasticsearch,Chhunlong/elasticsearch,Chhunlong/elasticsearch,NBSW/elasticsearch,petabytedata/elasticsearch,wimvds/elasticsearch,PhaedrusTheGreek/elasticsearch,smflorentino/elasticsearch,caengcjd/elasticsearch,sauravmondallive/elasticsearch,JackyMai/elasticsearch,xuzha/elasticsearch,mmaracic/elasticsearch,kunallimaye/elasticsearch,achow/elasticsearch,fooljohnny/elasticsearch,HarishAtGitHub/elasticsearch,alexbrasetvik/elasticsearch,easonC/elasticsearch
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.reducers.derivative;

import com.google.common.base.Function;
import com.google.common.collect.Lists;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InvalidAggregationPathException;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.InternalSimpleValue;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
import org.elasticsearch.search.aggregations.reducers.ReducerStreams;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class DerivativeReducer extends Reducer {

    public final static Type TYPE = new Type("derivative");

    public final static ReducerStreams.Stream STREAM = new ReducerStreams.Stream() {
        @Override
        public DerivativeReducer readResult(StreamInput in) throws IOException {
            DerivativeReducer result = new DerivativeReducer();
            result.readFrom(in);
            return result;
        }
    };

    public static void registerStreams() {
        ReducerStreams.registerStream(STREAM, TYPE.stream());
    }

    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
        @Override
        public InternalAggregation apply(Aggregation input) {
            return (InternalAggregation) input;
        }
    };

    private ValueFormatter formatter;

    public DerivativeReducer() {
    }

    public DerivativeReducer(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, Map<String, Object> metadata) {
        super(name, bucketsPaths, metadata);
        this.formatter = formatter;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    @Override
    public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
        InternalHistogram<? extends InternalHistogram.Bucket> histo = (InternalHistogram<? extends InternalHistogram.Bucket>) aggregation;
        List<? extends InternalHistogram.Bucket> buckets = histo.getBuckets();
        InternalHistogram.Factory<? extends InternalHistogram.Bucket> factory = histo.getFactory();

        List newBuckets = new ArrayList<>();
        Double lastBucketValue = null;
        // NOCOMMIT this needs to be improved so that the aggs are cloned correctly to ensure aggs are fully immutable.
        for (InternalHistogram.Bucket bucket : buckets) {
            Double thisBucketValue = resolveBucketValue(histo, bucket);
            if (lastBucketValue != null) {
                if (thisBucketValue == null) {
                    throw new ElasticsearchIllegalStateException("FOUND GAP IN DATA"); // NOCOMMIT deal with gaps in data
                }
                double diff = thisBucketValue - lastBucketValue;

                List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), FUNCTION));
                aggs.add(new InternalSimpleValue(name(), diff, formatter, new ArrayList<Reducer>(), metaData()));
                InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                        aggs), bucket.getKeyed(), bucket.getFormatter());
                newBuckets.add(newBucket);
            } else {
                newBuckets.add(bucket);
            }
            lastBucketValue = thisBucketValue;
        }
        return factory.create(histo.getName(), newBuckets, null, 1, null, null, false, new ArrayList<Reducer>(),
                histo.getMetaData()); // NOCOMMIT get order, minDocCount, emptyBucketInfo etc. from histo
    }

    private Double resolveBucketValue(InternalHistogram<? extends InternalHistogram.Bucket> histo, InternalHistogram.Bucket bucket) {
        try {
            Object propertyValue = bucket.getProperty(histo.getName(), AggregationPath.parse(bucketsPaths()[0])
                    .getPathElementsAsStringList());
            if (propertyValue instanceof Number) {
                return ((Number) propertyValue).doubleValue();
            } else if (propertyValue instanceof InternalNumericMetricsAggregation.SingleValue) {
                return ((InternalNumericMetricsAggregation.SingleValue) propertyValue).value();
            } else {
                throw new AggregationExecutionException(DerivativeParser.BUCKETS_PATH.getPreferredName()
                        + " must reference either a number value or a single value numeric metric aggregation");
            }
        } catch (InvalidAggregationPathException e) {
            return null;
        }
    }

    @Override
    public void doReadFrom(StreamInput in) throws IOException {
        formatter = ValueFormatterStreams.readOptional(in);
    }

    @Override
    public void doWriteTo(StreamOutput out) throws IOException {
        ValueFormatterStreams.writeOptional(formatter, out);
    }

    public static class Factory extends ReducerFactory {

        private final ValueFormatter formatter;

        public Factory(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter) {
            super(name, TYPE.name(), bucketsPaths);
            this.formatter = formatter;
        }

        @Override
        protected Reducer createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
                Map<String, Object> metaData) throws IOException {
            return new DerivativeReducer(name, bucketsPaths, formatter, metaData);
        }
    }
}
src/main/java/org/elasticsearch/search/aggregations/reducers/derivative/DerivativeReducer.java
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.reducers.derivative;

import com.google.common.base.Function;
import com.google.common.collect.Lists;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.reducers.InternalSimpleValue;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
import org.elasticsearch.search.aggregations.reducers.ReducerStreams;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class DerivativeReducer extends Reducer {

    public final static Type TYPE = new Type("derivative");

    public final static ReducerStreams.Stream STREAM = new ReducerStreams.Stream() {
        @Override
        public DerivativeReducer readResult(StreamInput in) throws IOException {
            DerivativeReducer result = new DerivativeReducer();
            result.readFrom(in);
            return result;
        }
    };

    public static void registerStreams() {
        ReducerStreams.registerStream(STREAM, TYPE.stream());
    }

    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
        @Override
        public InternalAggregation apply(Aggregation input) {
            return (InternalAggregation) input;
        }
    };

    private ValueFormatter formatter;

    public DerivativeReducer() {
    }

    public DerivativeReducer(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, Map<String, Object> metadata) {
        super(name, bucketsPaths, metadata);
        this.formatter = formatter;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    @Override
    public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
        InternalHistogram<? extends InternalHistogram.Bucket> histo = (InternalHistogram<? extends InternalHistogram.Bucket>) aggregation;
        List<? extends InternalHistogram.Bucket> buckets = histo.getBuckets();
        InternalHistogram.Factory<? extends InternalHistogram.Bucket> factory = histo.getFactory();

        List newBuckets = new ArrayList<>();
        Double lastBucketValue = null;
        // NOCOMMIT this needs to be improved so that the aggs are cloned correctly to ensure aggs are fully immutable.
        for (InternalHistogram.Bucket bucket : buckets) {
            double thisBucketValue = resolveBucketValue(histo, bucket);
            if (lastBucketValue != null) {
                double diff = thisBucketValue - lastBucketValue;

                List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), FUNCTION));
                aggs.add(new InternalSimpleValue(name(), diff, formatter, new ArrayList<Reducer>(), metaData()));
                InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                        aggs), bucket.getKeyed(), bucket.getFormatter());
                newBuckets.add(newBucket);
            } else {
                newBuckets.add(bucket);
            }
            lastBucketValue = thisBucketValue;
        }
        return factory.create(histo.getName(), newBuckets, null, 1, null, null, false, new ArrayList<Reducer>(),
                histo.getMetaData()); // NOCOMMIT get order, minDocCount, emptyBucketInfo etc. from histo
    }

    private double resolveBucketValue(InternalHistogram<? extends InternalHistogram.Bucket> histo, InternalHistogram.Bucket bucket) {
        Object propertyValue = bucket.getProperty(histo.getName(), AggregationPath.parse(bucketsPaths()[0])
                .getPathElementsAsStringList());
        if (propertyValue instanceof Number) {
            return ((Number) propertyValue).doubleValue();
        } else if (propertyValue instanceof InternalNumericMetricsAggregation.SingleValue) {
            return ((InternalNumericMetricsAggregation.SingleValue) propertyValue).value();
        } else {
            throw new AggregationExecutionException(DerivativeParser.BUCKETS_PATH.getPreferredName()
                    + "must reference either a number value or a single value numeric metric aggregation");
        }
    }

    @Override
    public void doReadFrom(StreamInput in) throws IOException {
        formatter = ValueFormatterStreams.readOptional(in);
    }

    @Override
    public void doWriteTo(StreamOutput out) throws IOException {
        ValueFormatterStreams.writeOptional(formatter, out);
    }

    public static class Factory extends ReducerFactory {

        private final ValueFormatter formatter;

        public Factory(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter) {
            super(name, TYPE.name(), bucketsPaths);
            this.formatter = formatter;
        }

        @Override
        protected Reducer createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket,
                Map<String, Object> metaData) throws IOException {
            return new DerivativeReducer(name, bucketsPaths, formatter, metaData);
        }
    }
}
Derivative Reducer now supports nth order derivatives
src/main/java/org/elasticsearch/search/aggregations/reducers/derivative/DerivativeReducer.java
Derivative Reducer now supports nth order derivatives
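For context on what the DerivativeReducer in the record above computes: reduce() walks the histogram buckets in order and, for every bucket after the first, emits the difference between that bucket's resolved value and the previous one. Below is a minimal standalone sketch of that first-difference pass (plain Java; `derive` and `bucketValues` are hypothetical names for illustration, not part of the Elasticsearch API):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FirstDifferenceSketch {

    // Mirrors the reduce() loop above: each output value is the current
    // bucket's value minus the previous bucket's value; the first bucket
    // has no predecessor, so it contributes no derivative.
    static List<Double> derive(List<Double> bucketValues) {
        List<Double> diffs = new ArrayList<>();
        Double last = null;
        for (Double value : bucketValues) {
            if (last != null) {
                diffs.add(value - last);
            }
            last = value;
        }
        return diffs;
    }

    public static void main(String[] args) {
        // e.g. per-bucket totals 10, 15, 12 -> first differences [5.0, -3.0]
        System.out.println(derive(Arrays.asList(10.0, 15.0, 12.0)));
    }
}
```

An nth-order derivative, as the commit subject puts it, is just this pass applied n times to its own output.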
Java
apache-2.0
5a2387b25608e6e0e002015b6d6bbe4d58f9d3db
0
rodionovsasha/ShoppingList,rodionovsasha/ShoppingList,rodionovsasha/ShoppingList
package ru.rodionovsasha.shoppinglist.controllers.rest;

import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import ru.rodionovsasha.shoppinglist.dto.ValidationErrorDTO;

import java.util.List;

@Slf4j
@ControllerAdvice
public class RestExceptionHandlerController {
    @ExceptionHandler(MethodArgumentNotValidException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    @ResponseBody
    public ValidationErrorDTO processValidationError(MethodArgumentNotValidException exception) {
        return processFieldErrors(exception.getBindingResult().getFieldErrors());
    }

    private ValidationErrorDTO processFieldErrors(List<FieldError> fieldErrors) {
        ValidationErrorDTO validationErrorDTO = new ValidationErrorDTO();
        log.error("Validation errors:");

        fieldErrors.forEach(fieldError -> {
            log.error("Field '" + fieldError.getField() + "': " + fieldError.getDefaultMessage());
            validationErrorDTO.addFieldError(fieldError.getField(), fieldError.getDefaultMessage());
        });

        return validationErrorDTO;
    }
}
src/main/java/ru/rodionovsasha/shoppinglist/controllers/rest/RestExceptionHandlerController.java
package ru.rodionovsasha.shoppinglist.controllers.rest;

import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import ru.rodionovsasha.shoppinglist.dto.ValidationErrorDTO;

import java.util.List;

@Slf4j
@ControllerAdvice
public class RestExceptionHandlerController {
    @ExceptionHandler(MethodArgumentNotValidException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    @ResponseBody
    public ValidationErrorDTO processValidationError(MethodArgumentNotValidException exception) {
        return processFieldErrors(exception.getBindingResult().getFieldErrors());
    }

    private ValidationErrorDTO processFieldErrors(List<FieldError> fieldErrors) {
        ValidationErrorDTO validationErrorDTO = new ValidationErrorDTO();
        log.error("Validation errors:");

        for (FieldError fieldError: fieldErrors) {
            log.error("Field '" + fieldError.getField() + "': " + fieldError.getDefaultMessage());
            validationErrorDTO.addFieldError(fieldError.getField(), fieldError.getDefaultMessage());
        }

        return validationErrorDTO;
    }
}
foreach
src/main/java/ru/rodionovsasha/shoppinglist/controllers/rest/RestExceptionHandlerController.java
foreach
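The diff in the record above is purely mechanical: the enhanced for loop over `List<FieldError>` in old_contents becomes `Iterable.forEach` with a lambda in new_contents. A minimal standalone sketch of the two equivalent forms (plain Java; the `fields` list is a hypothetical stand-in for the field errors):

```java
import java.util.Arrays;
import java.util.List;

public class ForEachSketch {
    public static void main(String[] args) {
        List<String> fields = Arrays.asList("name", "price");

        // old_contents style: enhanced for loop
        for (String field : fields) {
            System.out.println("Field '" + field + "'");
        }

        // new_contents style: Iterable.forEach with a lambda;
        // behaviorally identical for sequential iteration
        fields.forEach(field -> System.out.println("Field '" + field + "'"));
    }
}
```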
Java
apache-2.0
e7713b50dd43bf0432d08f1ad9495e88491b32cc
0
stylismo/spark,rokusr/spark,gimlet2/spark,stylismo/spark,arekkw/spark,perwendel/spark,gimlet2/spark,perwendel/spark,MouettE-SC/spark,perwendel/spark,stylismo/spark,stylismo/spark,gimlet2/spark,gimlet2/spark,rokusr/spark,arekkw/spark,MouettE-SC/spark,rokusr/spark,MouettE-SC/spark
/*
 * Copyright 2011- Per Wendel
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package spark;

import static spark.Service.ignite;

/**
 * The main building block of a Spark application is a set of routes. A route is
 * made up of three simple pieces:
 * <ul>
 * <li>A verb (get, post, put, delete, head, trace, connect, options)</li>
 * <li>A path (/hello, /users/:name)</li>
 * <li>A callback (request, response)</li>
 * </ul>
 * Example:
 * get("/hello", (request, response) -&#62; {
 *    return "Hello World!";
 * });
 * The public methods and fields in this class should be statically imported for the semantic to make sense.
 * Ie. one should use:
 * 'post("/books")' without the prefix 'Spark.'
 *
 * @author Per Wendel
 */
public class Spark {

    // Hide constructor
    protected Spark() {
    }

    /**
     * Initializes singleton.
     */
    private static class SingletonHolder {
        private static final Service INSTANCE = ignite();
    }

    private static Service getInstance() {
        return SingletonHolder.INSTANCE;
    }

    /**
     * Statically import this for redirect utility functionality, see {@link spark.Redirect}
     */
    public static final Redirect redirect = getInstance().redirect;

    /**
     * Statically import this for static files utility functionality, see {@link spark.Service.StaticFiles}
     */
    public static final Service.StaticFiles staticFiles = getInstance().staticFiles;

    /**
     * Add a path-prefix to the routes declared in the routeGroup
     * The path() method adds a path-fragment to a path-stack, adds
     * routes from the routeGroup, then pops the path-fragment again.
     * It's used for separating routes into groups, for example:
     * path("/api/email", () -> {
     *     ....post("/add", EmailApi::addEmail);
     *     ....put("/change", EmailApi::changeEmail);
     *     ....etc
     * });
     * Multiple path() calls can be nested.
     *
     * @param path       the path to prefix routes with
     * @param routeGroup group of routes (can also contain path() calls)
     */
    public static void path(String path, RouteGroup routeGroup) {
        getInstance().path(path, routeGroup);
    }

    /**
     * Map the route for HTTP GET requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void get(final String path, final Route route) {
        getInstance().get(path, route);
    }

    /**
     * Map the route for HTTP POST requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void post(String path, Route route) {
        getInstance().post(path, route);
    }

    /**
     * Map the route for HTTP PUT requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void put(String path, Route route) {
        getInstance().put(path, route);
    }

    /**
     * Map the route for HTTP PATCH requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void patch(String path, Route route) {
        getInstance().patch(path, route);
    }

    /**
     * Map the route for HTTP DELETE requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void delete(String path, Route route) {
        getInstance().delete(path, route);
    }

    /**
     * Map the route for HTTP HEAD requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void head(String path, Route route) {
        getInstance().head(path, route);
    }

    /**
     * Map the route for HTTP TRACE requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void trace(String path, Route route) {
        getInstance().trace(path, route);
    }

    /**
     * Map the route for HTTP CONNECT requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void connect(String path, Route route) {
        getInstance().connect(path, route);
    }

    /**
     * Map the route for HTTP OPTIONS requests
     *
     * @param path  the path
     * @param route The route
     */
    public static void options(String path, Route route) {
        getInstance().options(path, route);
    }

    /**
     * Maps a filter to be executed before any matching routes
     *
     * @param path   the path
     * @param filter The filter
     */
    public static void before(String path, Filter filter) {
        getInstance().before(path, filter);
    }

    /**
     * Maps an array of filters to be executed before any matching routes
     *
     * @param path    the path
     * @param filters the filters
     */
    public static void before(String path, Filter... filters) {
        for (Filter filter : filters) {
            getInstance().before(path, filter);
        }
    }

    /**
     * Maps a filter to be executed after any matching routes
     *
     * @param path   the path
     * @param filter The filter
     */
    public static void after(String path, Filter filter) {
        getInstance().after(path, filter);
    }

    /**
     * Maps an array of filters to be executed after any matching routes
     *
     * @param path    the path
     * @param filters The filters
     */
    public static void after(String path, Filter... filters) {
        for (Filter filter : filters) {
            getInstance().after(path, filter);
        }
    }

    //////////////////////////////////////////////////
    // BEGIN route/filter mapping with accept type
    //////////////////////////////////////////////////

    /**
     * Map the route for HTTP GET requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void get(String path, String acceptType, Route route) {
        getInstance().get(path, acceptType, route);
    }

    /**
     * Map the route for HTTP POST requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void post(String path, String acceptType, Route route) {
        getInstance().post(path, acceptType, route);
    }

    /**
     * Map the route for HTTP PUT requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void put(String path, String acceptType, Route route) {
        getInstance().put(path, acceptType, route);
    }

    /**
     * Map the route for HTTP PATCH requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void patch(String path, String acceptType, Route route) {
        getInstance().patch(path, acceptType, route);
    }

    /**
     * Map the route for HTTP DELETE requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void delete(String path, String acceptType, Route route) {
        getInstance().delete(path, acceptType, route);
    }

    /**
     * Map the route for HTTP HEAD requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void head(String path, String acceptType, Route route) {
        getInstance().head(path, acceptType, route);
    }

    /**
     * Map the route for HTTP TRACE requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void trace(String path, String acceptType, Route route) {
        getInstance().trace(path, acceptType, route);
    }

    /**
     * Map the route for HTTP CONNECT requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void connect(String path, String acceptType, Route route) {
        getInstance().connect(path, acceptType, route);
    }

    /**
     * Map the route for HTTP OPTIONS requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     */
    public static void options(String path, String acceptType, Route route) {
        getInstance().options(path, acceptType, route);
    }

    /**
     * Maps one or many filters to be executed before any matching routes
     *
     * @param filters The filters
     */
    public static void before(Filter... filters) {
        for (Filter filter : filters) {
            getInstance().before(filter);
        }
    }

    /**
     * Maps one or many filters to be executed after any matching routes
     *
     * @param filters The filters
     */
    public static void after(Filter... filters) {
        for (Filter filter : filters) {
            getInstance().after(filter);
        }
    }

    /**
     * Maps one or many filters to be executed before any matching routes
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param filters    The filters
     */
    public static void before(String path, String acceptType, Filter... filters) {
        for (Filter filter : filters) {
            getInstance().before(path, acceptType, filter);
        }
    }

    /**
     * Maps one or many filters to be executed after any matching routes
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param filters    The filters
     */
    public static void after(String path, String acceptType, Filter... filters) {
        for (Filter filter : filters) {
            getInstance().after(path, acceptType, filter);
        }
    }

    //////////////////////////////////////////////////
    // END route/filter mapping with accept type
    //////////////////////////////////////////////////

    //////////////////////////////////////////////////
    // BEGIN Template View Routes
    //////////////////////////////////////////////////

    /**
     * Map the route for HTTP GET requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void get(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().get(path, route, engine);
    }

    /**
     * Map the route for HTTP GET requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void get(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().get(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP POST requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void post(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().post(path, route, engine);
    }

    /**
     * Map the route for HTTP POST requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void post(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().post(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP PUT requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void put(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().put(path, route, engine);
    }

    /**
     * Map the route for HTTP PUT requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void put(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().put(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP DELETE requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void delete(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().delete(path, route, engine);
    }

    /**
     * Map the route for HTTP DELETE requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void delete(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().delete(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP PATCH requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void patch(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().patch(path, route, engine);
    }

    /**
     * Map the route for HTTP PATCH requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void patch(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().patch(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP HEAD requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void head(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().head(path, route, engine);
    }

    /**
     * Map the route for HTTP HEAD requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void head(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().head(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP TRACE requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void trace(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().trace(path, route, engine);
    }

    /**
     * Map the route for HTTP TRACE requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void trace(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().trace(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP CONNECT requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void connect(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().connect(path, route, engine);
    }

    /**
     * Map the route for HTTP CONNECT requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void connect(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().connect(path, acceptType, route, engine);
    }

    /**
     * Map the route for HTTP OPTIONS requests
     *
     * @param path   the path
     * @param route  The route
     * @param engine the template engine
     */
    public static void options(String path, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().options(path, route, engine);
    }

    /**
     * Map the route for HTTP OPTIONS requests
     *
     * @param path       the path
     * @param acceptType the accept type
     * @param route      The route
     * @param engine     the template engine
     */
    public static void options(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) {
        getInstance().options(path, acceptType, route, engine);
    }

    //////////////////////////////////////////////////
    // END Template View Routes
    //////////////////////////////////////////////////

    //////////////////////////////////////////////////
    // BEGIN Response Transforming Routes
    //////////////////////////////////////////////////

    /**
     * Map the route for HTTP GET requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void get(String path, Route route, ResponseTransformer transformer) {
        getInstance().get(path, route, transformer);
    }

    /**
     * Map the route for HTTP GET requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void get(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().get(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP POST requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void post(String path, Route route, ResponseTransformer transformer) {
        getInstance().post(path, route, transformer);
    }

    /**
     * Map the route for HTTP POST requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void post(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().post(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP PUT requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void put(String path, Route route, ResponseTransformer transformer) {
        getInstance().put(path, route, transformer);
    }

    /**
     * Map the route for HTTP PUT requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void put(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().put(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP DELETE requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void delete(String path, Route route, ResponseTransformer transformer) {
        getInstance().delete(path, route, transformer);
    }

    /**
     * Map the route for HTTP DELETE requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void delete(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().delete(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP HEAD requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void head(String path, Route route, ResponseTransformer transformer) {
        getInstance().head(path, route, transformer);
    }

    /**
     * Map the route for HTTP HEAD requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void head(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().head(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP CONNECT requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void connect(String path, Route route, ResponseTransformer transformer) {
        getInstance().connect(path, route, transformer);
    }

    /**
     * Map the route for HTTP CONNECT requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void connect(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().connect(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP TRACE requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void trace(String path, Route route, ResponseTransformer transformer) {
        getInstance().trace(path, route, transformer);
    }

    /**
     * Map the route for HTTP TRACE requests
     *
     * @param path        the path
     * @param acceptType  the accept type
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void trace(String path, String acceptType, Route route, ResponseTransformer transformer) {
        getInstance().trace(path, acceptType, route, transformer);
    }

    /**
     * Map the route for HTTP OPTIONS requests
     *
     * @param path        the path
     * @param route       The route
     * @param transformer the response transformer
     */
    public static void options(String path, Route route, ResponseTransformer transformer) {
        getInstance().options(path, route, transformer);
    }

    /**
     * Map the route
for HTTP OPTIONS requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void options(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().options(path, acceptType, route, transformer); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void patch(String path, Route route, ResponseTransformer transformer) { getInstance().patch(path, route, transformer); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void patch(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().patch(path, acceptType, route, transformer); } ////////////////////////////////////////////////// // END Response Transforming Routes ////////////////////////////////////////////////// ////////////////////////////////////////////////// // EXCEPTION mapper ////////////////////////////////////////////////// /** * Maps an exception handler to be executed when an exception occurs during routing * * @param exceptionClass the exception class * @param handler The handler */ public static void exception(Class<? extends Exception> exceptionClass, ExceptionHandler handler) { getInstance().exception(exceptionClass, handler); } ////////////////////////////////////////////////// // HALT methods ////////////////////////////////////////////////// /** * Immediately stops a request within a filter or route * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work */ public static HaltException halt() { throw getInstance().halt(); } /** * Immediately stops a request within a filter or route with the specified status code * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work * * @param status the status code */ public static HaltException halt(int status) { throw getInstance().halt(status); } /** * Immediately stops a request within a filter or route with the specified body content * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work * * @param body The body content */ public static HaltException halt(String body) { throw getInstance().halt(body); } /** * Immediately stops a request within a filter or route with the specified status code and body content * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work * * @param status The status code * @param body The body content */ public static HaltException halt(int status, String body) { throw getInstance().halt(status, body); } /** * Set the IP address that Spark should listen on. If not called, the default * address is '0.0.0.0'. This has to be called before any route mapping is * done. * * @param ipAddress The ipAddress * @deprecated replaced by {@link #ipAddress(String)} */ public static void setIpAddress(String ipAddress) { getInstance().ipAddress(ipAddress); } /** * Set the IP address that Spark should listen on. If not called, the default * address is '0.0.0.0'. This has to be called before any route mapping is * done.
* * @param ipAddress The ipAddress */ public static void ipAddress(String ipAddress) { getInstance().ipAddress(ipAddress); } /** * Set the port that Spark should listen on. If not called, the default port * is 4567. This has to be called before any route mapping is done. * If the provided port = 0, an arbitrary available port will be used. * * @param port The port number * @deprecated replaced by {@link #port(int)} */ public static void setPort(int port) { getInstance().port(port); } /** * Set the port that Spark should listen on. If not called, the default port * is 4567. This has to be called before any route mapping is done. * If the provided port = 0, an arbitrary available port will be used. * * @param port The port number */ public static void port(int port) { getInstance().port(port); } /** * Retrieves the port that Spark is listening on. * * @return The port the Spark server is listening on. * @throws IllegalStateException when the server is not started */ public static int port() { return getInstance().port(); } /** * Set the connection to be secure, using the specified keystore and * truststore. This has to be called before any route mapping is done. You * have to supply a keystore file; the truststore file is optional (the keystore * will be reused). * This method is only relevant when using embedded Jetty servers. It should * not be used if you are using Servlets, where you will need to secure the * connection in the servlet container. * * @param keystoreFile The keystore file location as string * @param keystorePassword the password for the keystore * @param truststoreFile the truststore file location as string, leave null to reuse * keystore * @param truststorePassword the trust store password * @deprecated replaced by {@link #secure(String, String, String, String)} */ public static void setSecure(String keystoreFile, String keystorePassword, String truststoreFile, String truststorePassword) { getInstance().secure(keystoreFile, keystorePassword, truststoreFile, truststorePassword); } /** * Set the connection to be secure, using the specified keystore and * truststore. This has to be called before any route mapping is done. You * have to supply a keystore file; the truststore file is optional (the keystore * will be reused). * This method is only relevant when using embedded Jetty servers. It should * not be used if you are using Servlets, where you will need to secure the * connection in the servlet container. * * @param keystoreFile The keystore file location as string * @param keystorePassword the password for the keystore * @param truststoreFile the truststore file location as string, leave null to reuse * keystore * @param truststorePassword the trust store password */ public static void secure(String keystoreFile, String keystorePassword, String truststoreFile, String truststorePassword) { getInstance().secure(keystoreFile, keystorePassword, truststoreFile, truststorePassword); } /** * Configures the embedded web server's thread pool. * * @param maxThreads max number of threads. */ public static void threadPool(int maxThreads) { getInstance().threadPool(maxThreads); } /** * Configures the embedded web server's thread pool. * * @param maxThreads max number of threads. * @param minThreads min number of threads. * @param idleTimeoutMillis thread idle timeout (ms). */ public static void threadPool(int maxThreads, int minThreads, int idleTimeoutMillis) { getInstance().threadPool(maxThreads, minThreads, idleTimeoutMillis); } /** * Sets the folder in classpath serving static files.
<b>Observe: this method * must be called before all other methods.</b> * - * Note: consider changing to the new static files paradigm {@link spark.Service.StaticFiles} * * @param folder the folder in classpath. */ public static void staticFileLocation(String folder) { getInstance().staticFileLocation(folder); } /** * Sets the external folder serving static files. <b>Observe: this method * must be called before all other methods.</b> * - * Note: consider using the new static files paradigm {@link spark.Service.StaticFiles} * * @param externalFolder the external folder serving static files. */ public static void externalStaticFileLocation(String externalFolder) { getInstance().externalStaticFileLocation(externalFolder); } /** * Waits for the Spark server to be initialized. * If it's already initialized, this method returns immediately. */ public static void awaitInitialization() { getInstance().awaitInitialization(); } /** * Stops the Spark server and clears all routes */ public static void stop() { getInstance().stop(); } //////////////// // Websockets // /** * Maps the given path to the given WebSocket handler. * <p> * This is currently only available in the embedded server mode. * * @param path the WebSocket path. * @param handler the handler class that will manage the WebSocket connection to the given path. */ public static void webSocket(String path, Class<?> handler) { getInstance().webSocket(path, handler); } public static void webSocket(String path, Object handler) { getInstance().webSocket(path, handler); } /** * Sets the max idle timeout in milliseconds for WebSocket connections. * * @param timeoutMillis The max idle timeout in milliseconds. */ public static void webSocketIdleTimeoutMillis(int timeoutMillis) { getInstance().webSocketIdleTimeoutMillis(timeoutMillis); } /** * Maps 404 Not Found errors to the provided custom page */ public static void notFound(String page) { getInstance().notFound(page); } /** * Maps 500 internal server errors to the provided custom page */ public static void internalServerError(String page) { getInstance().internalServerError(page); } /** * Maps 404 Not Found errors to the provided route. */ public static void notFound(Route route) { getInstance().notFound(route); } /** * Maps 500 internal server errors to the provided route. */ public static void internalServerError(Route route) { getInstance().internalServerError(route); } /** * Initializes the Spark server. SHOULD only be used when using the WebSockets functionality. */ public static void init() { getInstance().init(); } /** * Constructs a ModelAndView with the provided model and view name * * @param model the model * @param viewName the view name * @return the model and view */ public static ModelAndView modelAndView(Object model, String viewName) { return new ModelAndView(model, viewName); } }
src/main/java/spark/Spark.java
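A minimal usage sketch of the static facade above (illustrative only, not part of this record: the class name Demo and the routes are invented, and spark-core is assumed on the classpath). It exercises the three mapping families the file delegates to Service: a plain route, an accept-type route, and a path filter.

import static spark.Spark.*;

public class Demo {
    public static void main(String[] args) {
        port(8080);                                        // must run before any route is mapped

        // plain route
        get("/hello", (request, response) -> "Hello World!");

        // route restricted to requests accepting JSON
        get("/users", "application/json", (request, response) -> "[]");

        // filter executed before every request matching the path
        before("/users", (request, response) -> response.type("application/json"));
    }
}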
/* * Copyright 2011- Per Wendel * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package spark; import static spark.Service.ignite; /** * The main building block of a Spark application is a set of routes. A route is * made up of three simple pieces: * <ul> * <li>A verb (get, post, put, delete, head, trace, connect, options)</li> * <li>A path (/hello, /users/:name)</li> * <li>A callback (request, response)</li> * </ul> * Example: * get("/hello", (request, response) -&#62; { * return "Hello World!"; * }); * The public methods and fields in this class should be statically imported for the semantics to make sense. * I.e. one should use: * 'post("/books")' without the prefix 'Spark.' * * @author Per Wendel */ public class Spark { // Hide constructor protected Spark() { } /** * Initializes the singleton. */ private static class SingletonHolder { private static final Service INSTANCE = ignite(); } private static Service getInstance() { return SingletonHolder.INSTANCE; } /** * Statically import this for redirect utility functionality, see {@link spark.Redirect} */ public static final Redirect redirect = getInstance().redirect; /** * Statically import this for static files utility functionality, see {@link spark.Service.StaticFiles} */ public static final Service.StaticFiles staticFiles = getInstance().staticFiles; /** * Adds a path-prefix to the routes declared in the routeGroup. * The path() method adds a path-fragment to a path-stack, adds * routes from the routeGroup, then pops the path-fragment again. * It's used for separating routes into groups, for example: * path("/api/email", () -> { * ....post("/add", EmailApi::addEmail); * ....put("/change", EmailApi::changeEmail); * ....etc * }); * Multiple path() calls can be nested.
* * @param path the path to prefix routes with * @param routeGroup group of routes (can also contain path() calls) */ public static void path(String path, RouteGroup routeGroup) { getInstance().path(path, routeGroup); } /** * Map the route for HTTP GET requests * * @param path the path * @param route The route */ public static void get(final String path, final Route route) { getInstance().get(path, route); } /** * Map the route for HTTP POST requests * * @param path the path * @param route The route */ public static void post(String path, Route route) { getInstance().post(path, route); } /** * Map the route for HTTP PUT requests * * @param path the path * @param route The route */ public static void put(String path, Route route) { getInstance().put(path, route); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param route The route */ public static void patch(String path, Route route) { getInstance().patch(path, route); } /** * Map the route for HTTP DELETE requests * * @param path the path * @param route The route */ public static void delete(String path, Route route) { getInstance().delete(path, route); } /** * Map the route for HTTP HEAD requests * * @param path the path * @param route The route */ public static void head(String path, Route route) { getInstance().head(path, route); } /** * Map the route for HTTP TRACE requests * * @param path the path * @param route The route */ public static void trace(String path, Route route) { getInstance().trace(path, route); } /** * Map the route for HTTP CONNECT requests * * @param path the path * @param route The route */ public static void connect(String path, Route route) { getInstance().connect(path, route); } /** * Map the route for HTTP OPTIONS requests * * @param path the path * @param route The route */ public static void options(String path, Route route) { getInstance().options(path, route); } /** * Maps a filter to be executed before any matching routes * * @param path the path * @param filter The filter */ public static void before(String path, Filter filter) { getInstance().before(path, filter); } /** * Maps an array of filters to be executed before any matching routes * * @param path the path * @param filters the filters */ public static void before(String path, Filter... filters) { for (Filter filter : filters) { getInstance().before(path, filter); } } /** * Maps a filter to be executed after any matching routes * * @param path the path * @param filter The filter */ public static void after(String path, Filter filter) { getInstance().after(path, filter); } /** * Maps an array of filters to be executed after any matching routes * * @param path the path * @param filters The filters */ public static void after(String path, Filter... 
filters) { for (Filter filter : filters) { getInstance().after(path, filter); } } ////////////////////////////////////////////////// // BEGIN route/filter mapping with accept type ////////////////////////////////////////////////// /** * Map the route for HTTP GET requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void get(String path, String acceptType, Route route) { getInstance().get(path, acceptType, route); } /** * Map the route for HTTP POST requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void post(String path, String acceptType, Route route) { getInstance().post(path, acceptType, route); } /** * Map the route for HTTP PUT requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void put(String path, String acceptType, Route route) { getInstance().put(path, acceptType, route); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void patch(String path, String acceptType, Route route) { getInstance().patch(path, acceptType, route); } /** * Map the route for HTTP DELETE requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void delete(String path, String acceptType, Route route) { getInstance().delete(path, acceptType, route); } /** * Map the route for HTTP HEAD requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void head(String path, String acceptType, Route route) { getInstance().head(path, acceptType, route); } /** * Map the route for HTTP TRACE requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void trace(String path, String acceptType, Route route) { getInstance().trace(path, acceptType, route); } /** * Map the route for HTTP CONNECT requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void connect(String path, String acceptType, Route route) { getInstance().connect(path, acceptType, route); } /** * Map the route for HTTP OPTIONS requests * * @param path the path * @param acceptType the accept type * @param route The route */ public static void options(String path, String acceptType, Route route) { getInstance().options(path, acceptType, route); } /** * Maps one or many filters to be executed before any matching routes * * @param filters The filters */ public static void before(Filter... filters) { for (Filter filter : filters) { getInstance().before(filter); } } /** * Maps one or many filters to be executed after any matching routes * * @param filters The filters */ public static void after(Filter... filters) { for (Filter filter : filters) { getInstance().after(filter); } } /** * Maps one or many filters to be executed before any matching routes * * @param path the path * @param acceptType the accept type * @param filters The filters */ public static void before(String path, String acceptType, Filter... filters) { for (Filter filter : filters) { getInstance().before(path, acceptType, filter); } } /** * Maps one or many filters to be executed after any matching routes * * @param path the path * @param acceptType the accept type * @param filters The filters */ public static void after(String path, String acceptType, Filter... 
filters) { for (Filter filter : filters) { getInstance().after(path, acceptType, filter); } } ////////////////////////////////////////////////// // END route/filter mapping with accept type ////////////////////////////////////////////////// ////////////////////////////////////////////////// // BEGIN Template View Routes ////////////////////////////////////////////////// /** * Map the route for HTTP GET requests * * @param path the path * @param route The route * @param engine the template engine */ public static void get(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().get(path, route, engine); } /** * Map the route for HTTP GET requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void get(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().get(path, acceptType, route, engine); } /** * Map the route for HTTP POST requests * * @param path the path * @param route The route * @param engine the template engine */ public static void post(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().post(path, route, engine); } /** * Map the route for HTTP POST requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void post(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().post(path, acceptType, route, engine); } /** * Map the route for HTTP PUT requests * * @param path the path * @param route The route * @param engine the template engine */ public static void put(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().put(path, route, engine); } /** * Map the route for HTTP PUT requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void put(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().put(path, acceptType, route, engine); } /** * Map the route for HTTP DELETE requests * * @param path the path * @param route The route * @param engine the template engine */ public static void delete(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().delete(path, route, engine); } /** * Map the route for HTTP DELETE requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void delete(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().delete(path, acceptType, route, engine); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param route The route * @param engine the template engine */ public static void patch(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().patch(path, route, engine); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void patch(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().patch(path, acceptType, route, engine); } /** * Map the route for HTTP HEAD requests * * @param path the path * @param route The route * @param engine the template engine */ public static void head(String path, TemplateViewRoute route, TemplateEngine engine) { 
getInstance().head(path, route, engine); } /** * Map the route for HTTP HEAD requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void head(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().head(path, acceptType, route, engine); } /** * Map the route for HTTP TRACE requests * * @param path the path * @param route The route * @param engine the template engine */ public static void trace(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().trace(path, route, engine); } /** * Map the route for HTTP TRACE requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void trace(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().trace(path, acceptType, route, engine); } /** * Map the route for HTTP CONNECT requests * * @param path the path * @param route The route * @param engine the template engine */ public static void connect(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().connect(path, route, engine); } /** * Map the route for HTTP CONNECT requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void connect(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().connect(path, acceptType, route, engine); } /** * Map the route for HTTP OPTIONS requests * * @param path the path * @param route The route * @param engine the template engine */ public static void options(String path, TemplateViewRoute route, TemplateEngine engine) { getInstance().options(path, route, engine); } /** * Map the route for HTTP OPTIONS requests * * @param path the path * @param acceptType the accept type * @param route The route * @param engine the template engine */ public static void options(String path, String acceptType, TemplateViewRoute route, TemplateEngine engine) { getInstance().options(path, acceptType, route, engine); } ////////////////////////////////////////////////// // END Template View Routes ////////////////////////////////////////////////// ////////////////////////////////////////////////// // BEGIN Response Transforming Routes ////////////////////////////////////////////////// /** * Map the route for HTTP GET requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void get(String path, Route route, ResponseTransformer transformer) { getInstance().get(path, route, transformer); } /** * Map the route for HTTP GET requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void get(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().get(path, acceptType, route, transformer); } /** * Map the route for HTTP POST requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void post(String path, Route route, ResponseTransformer transformer) { getInstance().post(path, route, transformer); } /** * Map the route for HTTP POST requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void post(String 
path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().post(path, acceptType, route, transformer); } /** * Map the route for HTTP PUT requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void put(String path, Route route, ResponseTransformer transformer) { getInstance().put(path, route, transformer); } /** * Map the route for HTTP PUT requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void put(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().put(path, acceptType, route, transformer); } /** * Map the route for HTTP DELETE requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void delete(String path, Route route, ResponseTransformer transformer) { getInstance().delete(path, route, transformer); } /** * Map the route for HTTP DELETE requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void delete(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().delete(path, acceptType, route, transformer); } /** * Map the route for HTTP HEAD requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void head(String path, Route route, ResponseTransformer transformer) { getInstance().head(path, route, transformer); } /** * Map the route for HTTP HEAD requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void head(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().head(path, acceptType, route, transformer); } /** * Map the route for HTTP CONNECT requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void connect(String path, Route route, ResponseTransformer transformer) { getInstance().connect(path, route, transformer); } /** * Map the route for HTTP CONNECT requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void connect(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().connect(path, acceptType, route, transformer); } /** * Map the route for HTTP TRACE requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void trace(String path, Route route, ResponseTransformer transformer) { getInstance().trace(path, route, transformer); } /** * Map the route for HTTP TRACE requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void trace(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().trace(path, acceptType, route, transformer); } /** * Map the route for HTTP OPTIONS requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void options(String path, Route route, ResponseTransformer transformer) { getInstance().options(path, route, transformer); } /** * Map the route 
for HTTP OPTIONS requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void options(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().options(path, acceptType, route, transformer); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param route The route * @param transformer the response transformer */ public static void patch(String path, Route route, ResponseTransformer transformer) { getInstance().patch(path, route, transformer); } /** * Map the route for HTTP PATCH requests * * @param path the path * @param acceptType the accept type * @param route The route * @param transformer the response transformer */ public static void patch(String path, String acceptType, Route route, ResponseTransformer transformer) { getInstance().patch(path, acceptType, route, transformer); } ////////////////////////////////////////////////// // END Response Transforming Routes ////////////////////////////////////////////////// ////////////////////////////////////////////////// // EXCEPTION mapper ////////////////////////////////////////////////// /** * Maps an exception handler to be executed when an exception occurs during routing * * @param exceptionClass the exception class * @param handler The handler */ public static void exception(Class<? extends Exception> exceptionClass, ExceptionHandler handler) { getInstance().exception(exceptionClass, handler); } ////////////////////////////////////////////////// // HALT methods ////////////////////////////////////////////////// /** * Immediately stops a request within a filter or route * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work */ public static void halt() { getInstance().halt(); } /** * Immediately stops a request within a filter or route with the specified status code * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work * * @param status the status code */ public static void halt(int status) { getInstance().halt(status); } /** * Immediately stops a request within a filter or route with the specified body content * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work * * @param body The body content */ public static void halt(String body) { getInstance().halt(body); } /** * Immediately stops a request within a filter or route with the specified status code and body content * NOTE: When using this, don't catch exceptions of type HaltException, or if caught, re-throw them; otherwise * halt will not work * * @param status The status code * @param body The body content */ public static void halt(int status, String body) { getInstance().halt(status, body); } /** * Set the IP address that Spark should listen on. If not called, the default * address is '0.0.0.0'. This has to be called before any route mapping is * done. * * @param ipAddress The ipAddress * @deprecated replaced by {@link #ipAddress(String)} */ public static void setIpAddress(String ipAddress) { getInstance().ipAddress(ipAddress); } /** * Set the IP address that Spark should listen on. If not called, the default * address is '0.0.0.0'. This has to be called before any route mapping is * done.
* * @param ipAddress The ipAddress */ public static void ipAddress(String ipAddress) { getInstance().ipAddress(ipAddress); } /** * Set the port that Spark should listen on. If not called, the default port * is 4567. This has to be called before any route mapping is done. * If the provided port = 0, an arbitrary available port will be used. * * @param port The port number * @deprecated replaced by {@link #port(int)} */ public static void setPort(int port) { getInstance().port(port); } /** * Set the port that Spark should listen on. If not called, the default port * is 4567. This has to be called before any route mapping is done. * If the provided port = 0, an arbitrary available port will be used. * * @param port The port number */ public static void port(int port) { getInstance().port(port); } /** * Retrieves the port that Spark is listening on. * * @return The port the Spark server is listening on. * @throws IllegalStateException when the server is not started */ public static int port() { return getInstance().port(); } /** * Set the connection to be secure, using the specified keystore and * truststore. This has to be called before any route mapping is done. You * have to supply a keystore file; the truststore file is optional (the keystore * will be reused). * This method is only relevant when using embedded Jetty servers. It should * not be used if you are using Servlets, where you will need to secure the * connection in the servlet container. * * @param keystoreFile The keystore file location as string * @param keystorePassword the password for the keystore * @param truststoreFile the truststore file location as string, leave null to reuse * keystore * @param truststorePassword the trust store password * @deprecated replaced by {@link #secure(String, String, String, String)} */ public static void setSecure(String keystoreFile, String keystorePassword, String truststoreFile, String truststorePassword) { getInstance().secure(keystoreFile, keystorePassword, truststoreFile, truststorePassword); } /** * Set the connection to be secure, using the specified keystore and * truststore. This has to be called before any route mapping is done. You * have to supply a keystore file; the truststore file is optional (the keystore * will be reused). * This method is only relevant when using embedded Jetty servers. It should * not be used if you are using Servlets, where you will need to secure the * connection in the servlet container. * * @param keystoreFile The keystore file location as string * @param keystorePassword the password for the keystore * @param truststoreFile the truststore file location as string, leave null to reuse * keystore * @param truststorePassword the trust store password */ public static void secure(String keystoreFile, String keystorePassword, String truststoreFile, String truststorePassword) { getInstance().secure(keystoreFile, keystorePassword, truststoreFile, truststorePassword); } /** * Configures the embedded web server's thread pool. * * @param maxThreads max number of threads. */ public static void threadPool(int maxThreads) { getInstance().threadPool(maxThreads); } /** * Configures the embedded web server's thread pool. * * @param maxThreads max number of threads. * @param minThreads min number of threads. * @param idleTimeoutMillis thread idle timeout (ms). */ public static void threadPool(int maxThreads, int minThreads, int idleTimeoutMillis) { getInstance().threadPool(maxThreads, minThreads, idleTimeoutMillis); } /** * Sets the folder in classpath serving static files.
<b>Observe: this method * must be called before all other methods.</b> * - * Note: consider changing to the new static files paradigm {@link spark.Service.StaticFiles} * * @param folder the folder in classpath. */ public static void staticFileLocation(String folder) { getInstance().staticFileLocation(folder); } /** * Sets the external folder serving static files. <b>Observe: this method * must be called before all other methods.</b> * - * Note: consider using the new static files paradigm {@link spark.Service.StaticFiles} * * @param externalFolder the external folder serving static files. */ public static void externalStaticFileLocation(String externalFolder) { getInstance().externalStaticFileLocation(externalFolder); } /** * Waits for the Spark server to be initialized. * If it's already initialized, this method returns immediately. */ public static void awaitInitialization() { getInstance().awaitInitialization(); } /** * Stops the Spark server and clears all routes */ public static void stop() { getInstance().stop(); } //////////////// // Websockets // /** * Maps the given path to the given WebSocket handler. * <p> * This is currently only available in the embedded server mode. * * @param path the WebSocket path. * @param handler the handler class that will manage the WebSocket connection to the given path. */ public static void webSocket(String path, Class<?> handler) { getInstance().webSocket(path, handler); } public static void webSocket(String path, Object handler) { getInstance().webSocket(path, handler); } /** * Sets the max idle timeout in milliseconds for WebSocket connections. * * @param timeoutMillis The max idle timeout in milliseconds. */ public static void webSocketIdleTimeoutMillis(int timeoutMillis) { getInstance().webSocketIdleTimeoutMillis(timeoutMillis); } /** * Maps 404 Not Found errors to the provided custom page */ public static void notFound(String page) { getInstance().notFound(page); } /** * Maps 500 internal server errors to the provided custom page */ public static void internalServerError(String page) { getInstance().internalServerError(page); } /** * Maps 404 Not Found errors to the provided route. */ public static void notFound(Route route) { getInstance().notFound(route); } /** * Maps 500 internal server errors to the provided route. */ public static void internalServerError(Route route) { getInstance().internalServerError(route); } /** * Initializes the Spark server. SHOULD only be used when using the WebSockets functionality. */ public static void init() { getInstance().init(); } /** * Constructs a ModelAndView with the provided model and view name * * @param model the model * @param viewName the view name * @return the model and view */ public static ModelAndView modelAndView(Object model, String viewName) { return new ModelAndView(model, viewName); } }
Spark.halt() method signature should correspond to Service.halt()
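The practical effect of the change, shown as a hedged sketch (the route, the header check, and the class name HaltDemo are invented; only the halt() signatures come from the two file versions above): the old static wrapper was void, so the compiler could not see that a call to halt() never returns normally and still demanded a return value after it. The new wrapper throws the HaltException obtained from Service.halt(), which lets a caller end a branch with an explicit throw.

import static spark.Spark.*;

public class HaltDemo {
    public static void main(String[] args) {
        get("/secure", (request, response) -> {
            if (request.headers("Authorization") == null) {
                // halt(...) throws HaltException itself; because it also
                // declares HaltException as its return type, we can write an
                // explicit throw, so the compiler treats this branch as
                // finished and does not require a return value here.
                throw halt(401, "Unauthorized");
            }
            return "secret";
        });
    }
}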
Java
apache-2.0
a38b9e137e67571d2df83a7a9505b66cffefa7c8
0
lukmajercak/hadoop,plusplusjiajia/hadoop,apurtell/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,plusplusjiajia/hadoop,nandakumar131/hadoop,steveloughran/hadoop,apache/hadoop,JingchengDu/hadoop,lukmajercak/hadoop,apurtell/hadoop,lukmajercak/hadoop,lukmajercak/hadoop,apurtell/hadoop,steveloughran/hadoop,JingchengDu/hadoop,mapr/hadoop-common,steveloughran/hadoop,JingchengDu/hadoop,nandakumar131/hadoop,steveloughran/hadoop,lukmajercak/hadoop,steveloughran/hadoop,nandakumar131/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,mapr/hadoop-common,steveloughran/hadoop,nandakumar131/hadoop,lukmajercak/hadoop,JingchengDu/hadoop,JingchengDu/hadoop,apache/hadoop,apache/hadoop,plusplusjiajia/hadoop,nandakumar131/hadoop,mapr/hadoop-common,mapr/hadoop-common,apache/hadoop,wwjiang007/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,apache/hadoop,JingchengDu/hadoop,mapr/hadoop-common,apurtell/hadoop,nandakumar131/hadoop,apurtell/hadoop,wwjiang007/hadoop,plusplusjiajia/hadoop,apache/hadoop,nandakumar131/hadoop,apurtell/hadoop,apurtell/hadoop,lukmajercak/hadoop,steveloughran/hadoop,wwjiang007/hadoop,apache/hadoop,mapr/hadoop-common,mapr/hadoop-common,wwjiang007/hadoop,plusplusjiajia/hadoop
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.ReconfigurableBase; import org.apache.hadoop.conf.ReconfigurationException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Trash; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo; import org.apache.hadoop.ha.HAServiceStatus; import org.apache.hadoop.ha.HealthCheckFailedException; import org.apache.hadoop.ha.ServiceFailedException; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants.StoragePolicySatisfierMode; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMap; import org.apache.hadoop.hdfs.server.aliasmap.InMemoryLevelDBAliasMapServer; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.RollingUpgradeStartupOption; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.common.MetricsLoggerTask; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.common.TokenVerifier; import org.apache.hadoop.hdfs.server.namenode.ha.ActiveState; import org.apache.hadoop.hdfs.server.namenode.ha.BootstrapStandby; import org.apache.hadoop.hdfs.server.namenode.ha.HAContext; import org.apache.hadoop.hdfs.server.namenode.ha.HAState; import org.apache.hadoop.hdfs.server.namenode.ha.StandbyState; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics; import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress; import 
org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressMetrics; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.JournalProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.ipc.ExternalCall; import org.apache.hadoop.ipc.RefreshCallQueueProtocol; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.StandbyException; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.RefreshUserMappingsProtocol; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol; import org.apache.hadoop.tools.GetUserMappingsProtocol; import org.apache.hadoop.tracing.TraceAdminProtocol; import org.apache.hadoop.tracing.TraceUtils; import org.apache.hadoop.tracing.TracerConfigurationManager; import org.apache.hadoop.util.ExitUtil.ExitException; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.JvmPauseMonitor; import org.apache.hadoop.util.ServicePlugin; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.htrace.core.Tracer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.ObjectName; import java.io.IOException; import java.io.PrintStream; import java.net.InetSocketAddress; import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.TreeSet; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE_DEFAULT; import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_ENABLED_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_ENABLED_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_AUTO_FAILOVER_ENABLED_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_AUTO_FAILOVER_ENABLED_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_ZKFC_PORT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_PLUGINS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_STARTUP_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICE_ID; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.FS_PROTECTED_DIRECTORIES; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_STORAGE_POLICY_SATISFIER_MODE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION_DEFAULT; import static org.apache.hadoop.util.ExitUtil.terminate; import static org.apache.hadoop.util.ToolRunner.confirmPrompt; import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_BACKOFF_ENABLE; import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_NAMESPACE; import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_BACKOFF_ENABLE_DEFAULT; /********************************************************** * NameNode serves as both directory namespace manager and * "inode table" for the Hadoop DFS. There is a single NameNode * running in any DFS deployment. (Well, except when there * is a second backup/failover NameNode, or when using federated NameNodes.) * * The NameNode controls two critical tables: * 1) filename{@literal ->}blocksequence (namespace) * 2) block{@literal ->}machinelist ("inodes") * * The first table is stored on disk and is very precious. * The second table is rebuilt every time the NameNode comes up. * * 'NameNode' refers to both this class as well as the 'NameNode server'. * The 'FSNamesystem' class actually performs most of the filesystem * management. The majority of the 'NameNode' class itself is concerned * with exposing the IPC interface and the HTTP server to the outside world, * plus some configuration management. * * NameNode implements the * {@link org.apache.hadoop.hdfs.protocol.ClientProtocol} interface, which * allows clients to ask for DFS services. * {@link org.apache.hadoop.hdfs.protocol.ClientProtocol} is not designed for * direct use by authors of DFS client code. End-users should instead use the * {@link org.apache.hadoop.fs.FileSystem} class. * * NameNode also implements the * {@link org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol} interface, * used by DataNodes that actually store DFS data blocks. These * methods are invoked repeatedly and automatically by all the * DataNodes in a DFS deployment. * * NameNode also implements the * {@link org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol} interface, * used by secondary namenodes or rebalancing processes to get partial * NameNode state, for example partial blocksMap etc. **********************************************************/ @InterfaceAudience.Private public class NameNode extends ReconfigurableBase implements NameNodeStatusMXBean, TokenVerifier<DelegationTokenIdentifier> { static{ HdfsConfiguration.init(); } private InMemoryLevelDBAliasMapServer levelDBAliasMapServer; /** * Categories of operations supported by the namenode. */ public enum OperationCategory { /** Operations that are state agnostic */ UNCHECKED, /** Read operation that does not change the namespace state */ READ, /** Write operation that changes the namespace state */ WRITE, /** Operations related to checkpointing */ CHECKPOINT, /** Operations related to {@link JournalProtocol} */ JOURNAL } /** * HDFS configuration can have three types of parameters: * <ol> * <li>Parameters that are common for all the name services in the cluster.</li> * <li>Parameters that are specific to a name service. 
These keys are suffixed * with nameserviceId in the configuration. For example, * "dfs.namenode.rpc-address.nameservice1".</li> * <li>Parameters that are specific to a single name node. These keys are suffixed * with nameserviceId and namenodeId in the configuration. For example, * "dfs.namenode.rpc-address.nameservice1.namenode1"</li> * </ol> * * In the latter cases, operators may specify the configuration without * any suffix, with a nameservice suffix, or with a nameservice and namenode * suffix. The more specific suffix will take precedence. * * These keys are specific to a given namenode, and thus may be configured * globally, for a nameservice, or for a specific namenode within a nameservice. */ public static final String[] NAMENODE_SPECIFIC_KEYS = { DFS_NAMENODE_RPC_ADDRESS_KEY, DFS_NAMENODE_RPC_BIND_HOST_KEY, DFS_NAMENODE_NAME_DIR_KEY, DFS_NAMENODE_EDITS_DIR_KEY, DFS_NAMENODE_SHARED_EDITS_DIR_KEY, DFS_NAMENODE_CHECKPOINT_DIR_KEY, DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY, DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY, DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY, DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY, DFS_NAMENODE_HTTP_ADDRESS_KEY, DFS_NAMENODE_HTTPS_ADDRESS_KEY, DFS_NAMENODE_HTTP_BIND_HOST_KEY, DFS_NAMENODE_HTTPS_BIND_HOST_KEY, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY, DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_BACKUP_ADDRESS_KEY, DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY, DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, DFS_HA_FENCE_METHODS_KEY, DFS_HA_ZKFC_PORT_KEY, }; /** * @see #NAMENODE_SPECIFIC_KEYS * These keys are specific to a nameservice, but may not be overridden * for a specific namenode. */ public static final String[] NAMESERVICE_SPECIFIC_KEYS = { DFS_HA_AUTO_FAILOVER_ENABLED_KEY }; private String ipcClientRPCBackoffEnable; /** A list of properties that are reconfigurable at runtime.
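 * Illustrative only (host and port below are hypothetical): such a
 * property is typically changed at runtime through the admin CLI, e.g.
 * <pre>{@code
 * hdfs dfsadmin -reconfig namenode nn1.example.com:8020 start
 * hdfs dfsadmin -reconfig namenode nn1.example.com:8020 status
 * }</pre>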
*/ private final TreeSet<String> reconfigurableProperties = Sets .newTreeSet(Lists.newArrayList( DFS_HEARTBEAT_INTERVAL_KEY, DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, FS_PROTECTED_DIRECTORIES, HADOOP_CALLER_CONTEXT_ENABLED_KEY, DFS_STORAGE_POLICY_SATISFIER_MODE_KEY, DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY, DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION)); private static final String USAGE = "Usage: hdfs namenode [" + StartupOption.BACKUP.getName() + "] | \n\t[" + StartupOption.CHECKPOINT.getName() + "] | \n\t[" + StartupOption.FORMAT.getName() + " [" + StartupOption.CLUSTERID.getName() + " cid ] [" + StartupOption.FORCE.getName() + "] [" + StartupOption.NONINTERACTIVE.getName() + "] ] | \n\t[" + StartupOption.UPGRADE.getName() + " [" + StartupOption.CLUSTERID.getName() + " cid]" + " [" + StartupOption.RENAMERESERVED.getName() + "<k-v pairs>] ] | \n\t[" + StartupOption.UPGRADEONLY.getName() + " [" + StartupOption.CLUSTERID.getName() + " cid]" + " [" + StartupOption.RENAMERESERVED.getName() + "<k-v pairs>] ] | \n\t[" + StartupOption.ROLLBACK.getName() + "] | \n\t[" + StartupOption.ROLLINGUPGRADE.getName() + " " + RollingUpgradeStartupOption.getAllOptionString() + " ] | \n\t[" + StartupOption.IMPORT.getName() + "] | \n\t[" + StartupOption.INITIALIZESHAREDEDITS.getName() + "] | \n\t[" + StartupOption.BOOTSTRAPSTANDBY.getName() + " [" + StartupOption.FORCE.getName() + "] [" + StartupOption.NONINTERACTIVE.getName() + "] [" + StartupOption.SKIPSHAREDEDITSCHECK.getName() + "] ] | \n\t[" + StartupOption.RECOVER.getName() + " [ " + StartupOption.FORCE.getName() + "] ] | \n\t[" + StartupOption.METADATAVERSION.getName() + " ]"; public long getProtocolVersion(String protocol, long clientVersion) throws IOException { if (protocol.equals(ClientProtocol.class.getName())) { return ClientProtocol.versionID; } else if (protocol.equals(DatanodeProtocol.class.getName())){ return DatanodeProtocol.versionID; } else if (protocol.equals(NamenodeProtocol.class.getName())){ return NamenodeProtocol.versionID; } else if (protocol.equals(RefreshAuthorizationPolicyProtocol.class.getName())){ return RefreshAuthorizationPolicyProtocol.versionID; } else if (protocol.equals(RefreshUserMappingsProtocol.class.getName())){ return RefreshUserMappingsProtocol.versionID; } else if (protocol.equals(RefreshCallQueueProtocol.class.getName())) { return RefreshCallQueueProtocol.versionID; } else if (protocol.equals(GetUserMappingsProtocol.class.getName())){ return GetUserMappingsProtocol.versionID; } else if (protocol.equals(TraceAdminProtocol.class.getName())){ return TraceAdminProtocol.versionID; } else { throw new IOException("Unknown protocol to name node: " + protocol); } } /** * @deprecated Use {@link HdfsClientConfigKeys#DFS_NAMENODE_RPC_PORT_DEFAULT} * instead. 
*/ @Deprecated public static final int DEFAULT_PORT = DFS_NAMENODE_RPC_PORT_DEFAULT; public static final Logger LOG = LoggerFactory.getLogger(NameNode.class.getName()); public static final Logger stateChangeLog = LoggerFactory.getLogger("org.apache.hadoop.hdfs.StateChange"); public static final Logger blockStateChangeLog = LoggerFactory.getLogger("BlockStateChange"); public static final HAState ACTIVE_STATE = new ActiveState(); public static final HAState STANDBY_STATE = new StandbyState(); public static final HAState OBSERVER_STATE = new StandbyState(true); private static final String NAMENODE_HTRACE_PREFIX = "namenode.htrace."; public static final Log MetricsLog = LogFactory.getLog("NameNodeMetricsLog"); protected FSNamesystem namesystem; protected final NamenodeRole role; private volatile HAState state; private final boolean haEnabled; private final HAContext haContext; protected final boolean allowStaleStandbyReads; private AtomicBoolean started = new AtomicBoolean(false); private final boolean notBecomeActiveInSafemode; private final static int HEALTH_MONITOR_WARN_THRESHOLD_MS = 5000; /** httpServer */ protected NameNodeHttpServer httpServer; private Thread emptier; /** only used for testing purposes */ protected boolean stopRequested = false; /** Registration information of this name-node */ protected NamenodeRegistration nodeRegistration; /** Activated plug-ins. */ private List<ServicePlugin> plugins; private NameNodeRpcServer rpcServer; private JvmPauseMonitor pauseMonitor; private ObjectName nameNodeStatusBeanName; protected final Tracer tracer; protected final TracerConfigurationManager tracerConfigurationManager; ScheduledThreadPoolExecutor metricsLoggerTimer; /** * The namenode address that clients will use to access this namenode * or the name service. For HA configurations using logical URI, it * will be the logical address. */ private String clientNamenodeAddress; /** Format a new filesystem. Destroys any filesystem that may already * exist at this location. **/ public static void format(Configuration conf) throws IOException { format(conf, true, true); } static NameNodeMetrics metrics; private static final StartupProgress startupProgress = new StartupProgress(); /** Return the {@link FSNamesystem} object. * @return {@link FSNamesystem} object. */ public FSNamesystem getNamesystem() { return namesystem; } public NamenodeProtocols getRpcServer() { return rpcServer; } @VisibleForTesting public HttpServer2 getHttpServer() { return httpServer.getHttpServer(); } public void queueExternalCall(ExternalCall<?> extCall) throws IOException, InterruptedException { if (rpcServer == null) { throw new RetriableException("Namenode is in startup mode"); } rpcServer.getClientRpcServer().queueCall(extCall); } public static void initMetrics(Configuration conf, NamenodeRole role) { metrics = NameNodeMetrics.create(conf, role); } public static NameNodeMetrics getNameNodeMetrics() { return metrics; } /** * Returns object used for reporting namenode startup progress. * * @return StartupProgress for reporting namenode startup progress */ public static StartupProgress getStartupProgress() { return startupProgress; } /** * Return the service name of the issued delegation token. * * @return The name service id in HA-mode, or the rpc address in non-HA mode */ public String getTokenServiceName() { return getClientNamenodeAddress(); } /** * Get the namenode address to be used by clients. 
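 * For an HA configuration using a logical URI this is the logical
 * nameservice address (for example a value like {@code mycluster})
 * rather than a physical host:port pair.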
* @return nn address */ public String getClientNamenodeAddress() { return clientNamenodeAddress; } /** * Set the configuration property for the service RPC address * to the given address. */ public static void setServiceAddress(Configuration conf, String address) { LOG.info("Setting ADDRESS {}", address); conf.set(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, address); } /** * Fetches the address that services should use to connect to this * namenode. If the service-specific RPC address is not configured, * returns null when {@code fallback} is false, or the default namenode * address (shared by clients and services) when {@code fallback} is true. * Services here are datanodes, the backup node, and any other non-client * connection. */ public static InetSocketAddress getServiceAddress(Configuration conf, boolean fallback) { String addr = conf.getTrimmed(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY); if (addr == null || addr.isEmpty()) { return fallback ? DFSUtilClient.getNNAddress(conf) : null; } return DFSUtilClient.getNNAddress(addr); } // // Common NameNode methods implementation for the active name-node role. // public NamenodeRole getRole() { return role; } boolean isRole(NamenodeRole that) { return role.equals(that); } public static String composeNotStartedMessage(NamenodeRole role) { return role + " still not started"; } /** * Given a configuration, get the address of the lifeline RPC server. * If the lifeline RPC is not configured, returns null. * * @param conf configuration * @return address or null */ InetSocketAddress getLifelineRpcServerAddress(Configuration conf) { String addr = getTrimmedOrNull(conf, DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY); if (addr == null) { return null; } return NetUtils.createSocketAddr(addr); } /** * Given a configuration, get the address of the service RPC server. * If the service RPC is not configured, returns null. */ protected InetSocketAddress getServiceRpcServerAddress(Configuration conf) { return NameNode.getServiceAddress(conf, false); } protected InetSocketAddress getRpcServerAddress(Configuration conf) { return DFSUtilClient.getNNAddress(conf); } /** * Given a configuration, get the bind host of the lifeline RPC server. * If the bind host is not configured, returns null. * * @param conf configuration * @return bind host or null */ String getLifelineRpcServerBindHost(Configuration conf) { return getTrimmedOrNull(conf, DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY); } /** Given a configuration, get the bind host of the service RPC server. * If the bind host is not configured, returns null. */ protected String getServiceRpcServerBindHost(Configuration conf) { return getTrimmedOrNull(conf, DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY); } /** Given a configuration, get the bind host of the client RPC server. * If the bind host is not configured, returns null. */ protected String getRpcServerBindHost(Configuration conf) { return getTrimmedOrNull(conf, DFS_NAMENODE_RPC_BIND_HOST_KEY); } /** * Gets a trimmed value from configuration, or null if no value is defined. * * @param conf configuration * @param key configuration key to get * @return trimmed value, or null if no value is defined */ private static String getTrimmedOrNull(Configuration conf, String key) { String addr = conf.getTrimmed(key); if (addr == null || addr.isEmpty()) { return null; } return addr; } /** * Modifies the configuration to contain the lifeline RPC address setting.
* * @param conf configuration to modify * @param lifelineRPCAddress lifeline RPC address */ void setRpcLifelineServerAddress(Configuration conf, InetSocketAddress lifelineRPCAddress) { LOG.info("Setting lifeline RPC address {}", lifelineRPCAddress); conf.set(DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY, NetUtils.getHostPortString(lifelineRPCAddress)); } /** * Modifies the configuration passed to contain the service rpc address setting */ protected void setRpcServiceServerAddress(Configuration conf, InetSocketAddress serviceRPCAddress) { setServiceAddress(conf, NetUtils.getHostPortString(serviceRPCAddress)); } protected void setRpcServerAddress(Configuration conf, InetSocketAddress rpcAddress) { FileSystem.setDefaultUri(conf, DFSUtilClient.getNNUri(rpcAddress)); } protected InetSocketAddress getHttpServerAddress(Configuration conf) { return getHttpAddress(conf); } /** * HTTP server address for binding the endpoint. This method is * for use by the NameNode and its derivatives. It may return * a different address than the one that should be used by clients to * connect to the NameNode. See * {@link DFSConfigKeys#DFS_NAMENODE_HTTP_BIND_HOST_KEY} * * @param conf * @return */ protected InetSocketAddress getHttpServerBindAddress(Configuration conf) { InetSocketAddress bindAddress = getHttpServerAddress(conf); // If DFS_NAMENODE_HTTP_BIND_HOST_KEY exists then it overrides the // host name portion of DFS_NAMENODE_HTTP_ADDRESS_KEY. final String bindHost = conf.getTrimmed(DFS_NAMENODE_HTTP_BIND_HOST_KEY); if (bindHost != null && !bindHost.isEmpty()) { bindAddress = new InetSocketAddress(bindHost, bindAddress.getPort()); } return bindAddress; } /** @return the NameNode HTTP address. */ public static InetSocketAddress getHttpAddress(Configuration conf) { return NetUtils.createSocketAddr( conf.getTrimmed(DFS_NAMENODE_HTTP_ADDRESS_KEY, DFS_NAMENODE_HTTP_ADDRESS_DEFAULT)); } protected void loadNamesystem(Configuration conf) throws IOException { this.namesystem = FSNamesystem.loadFromDisk(conf); } NamenodeRegistration getRegistration() { return nodeRegistration; } NamenodeRegistration setRegistration() { nodeRegistration = new NamenodeRegistration( NetUtils.getHostPortString(getNameNodeAddress()), NetUtils.getHostPortString(getHttpAddress()), getFSImage().getStorage(), getRole()); return nodeRegistration; } /* optimize ugi lookup for RPC operations to avoid a trip through * UGI.getCurrentUser which is synch'ed */ public static UserGroupInformation getRemoteUser() throws IOException { UserGroupInformation ugi = Server.getRemoteUser(); return (ugi != null) ? ugi : UserGroupInformation.getCurrentUser(); } @Override public void verifyToken(DelegationTokenIdentifier id, byte[] password) throws IOException { // during startup namesystem is null, let client retry if (namesystem == null) { throw new RetriableException("Namenode is in startup mode"); } namesystem.verifyToken(id, password); } /** * Login as the configured user for the NameNode. */ void loginAsNameNodeUser(Configuration conf) throws IOException { InetSocketAddress socAddr = getRpcServerAddress(conf); SecurityUtil.login(conf, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); } /** * Initialize name-node. 
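 * Wires up metrics and the JVM pause monitor, logs in as the configured
 * NameNode principal, starts the HTTP server (for the NAMENODE role),
 * loads the namesystem from disk, and finally starts the RPC server,
 * the common services and the metrics logger.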
* * @param conf the configuration */ protected void initialize(Configuration conf) throws IOException { if (conf.get(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS) == null) { String intervals = conf.get(DFS_METRICS_PERCENTILES_INTERVALS_KEY); if (intervals != null) { conf.set(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS, intervals); } } UserGroupInformation.setConfiguration(conf); loginAsNameNodeUser(conf); NameNode.initMetrics(conf, this.getRole()); StartupProgressMetrics.register(startupProgress); pauseMonitor = new JvmPauseMonitor(); pauseMonitor.init(conf); pauseMonitor.start(); metrics.getJvmMetrics().setPauseMonitor(pauseMonitor); if (NamenodeRole.NAMENODE == role) { startHttpServer(conf); } loadNamesystem(conf); startAliasMapServerIfNecessary(conf); rpcServer = createRpcServer(conf); initReconfigurableBackoffKey(); if (clientNamenodeAddress == null) { // This is expected for MiniDFSCluster. Set it now using // the RPC server's bind address. clientNamenodeAddress = NetUtils.getHostPortString(getNameNodeAddress()); LOG.info("Clients are to use " + clientNamenodeAddress + " to access" + " this namenode/service."); } if (NamenodeRole.NAMENODE == role) { httpServer.setNameNodeAddress(getNameNodeAddress()); httpServer.setFSImage(getFSImage()); if (levelDBAliasMapServer != null) { httpServer.setAliasMap(levelDBAliasMapServer.getAliasMap()); } } startCommonServices(conf); startMetricsLogger(conf); } @VisibleForTesting public InMemoryLevelDBAliasMapServer getAliasMapServer() { return levelDBAliasMapServer; } private void startAliasMapServerIfNecessary(Configuration conf) throws IOException { if (conf.getBoolean(DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED, DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED_DEFAULT) && conf.getBoolean(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED, DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED_DEFAULT)) { levelDBAliasMapServer = new InMemoryLevelDBAliasMapServer( InMemoryAliasMap::init, namesystem.getBlockPoolId()); levelDBAliasMapServer.setConf(conf); levelDBAliasMapServer.start(); } } private void initReconfigurableBackoffKey() { ipcClientRPCBackoffEnable = buildBackoffEnableKey(rpcServer .getClientRpcServer().getPort()); reconfigurableProperties.add(ipcClientRPCBackoffEnable); } static String buildBackoffEnableKey(final int port) { // format used to construct backoff enable key, e.g. ipc.8020.backoff.enable String format = "%s.%d.%s"; return String.format(format, IPC_NAMESPACE, port, IPC_BACKOFF_ENABLE); } /** * Start a timer to periodically write NameNode metrics to the log * file. This behavior can be disabled by configuration. * @param conf */ protected void startMetricsLogger(Configuration conf) { long metricsLoggerPeriodSec = conf.getInt(DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_KEY, DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_DEFAULT); if (metricsLoggerPeriodSec <= 0) { return; } MetricsLoggerTask.makeMetricsLoggerAsync(MetricsLog); // Schedule the periodic logging. metricsLoggerTimer = new ScheduledThreadPoolExecutor(1); metricsLoggerTimer.setExecuteExistingDelayedTasksAfterShutdownPolicy( false); metricsLoggerTimer.scheduleWithFixedDelay(new MetricsLoggerTask(MetricsLog, "NameNode", (short) 128), metricsLoggerPeriodSec, metricsLoggerPeriodSec, TimeUnit.SECONDS); } protected void stopMetricsLogger() { if (metricsLoggerTimer != null) { metricsLoggerTimer.shutdown(); metricsLoggerTimer = null; } } /** * Create the RPC server implementation. Used as an extension point for the * BackupNode. 
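 * <p>A sketch of how a derivative could plug in its own server; the
 * subclass name below is illustrative, not an existing Hadoop class:
 * <pre>{@code
 * protected NameNodeRpcServer createRpcServer(Configuration conf)
 *     throws IOException {
 *   return new AuditingNameNodeRpcServer(conf, this); // hypothetical
 * }
 * }</pre>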
*/ protected NameNodeRpcServer createRpcServer(Configuration conf) throws IOException { return new NameNodeRpcServer(conf, this); } /** Start the services common to active and standby states */ private void startCommonServices(Configuration conf) throws IOException { namesystem.startCommonServices(conf, haContext); registerNNSMXBean(); if (NamenodeRole.NAMENODE != role) { startHttpServer(conf); httpServer.setNameNodeAddress(getNameNodeAddress()); httpServer.setFSImage(getFSImage()); if (levelDBAliasMapServer != null) { httpServer.setAliasMap(levelDBAliasMapServer.getAliasMap()); } } rpcServer.start(); try { plugins = conf.getInstances(DFS_NAMENODE_PLUGINS_KEY, ServicePlugin.class); } catch (RuntimeException e) { String pluginsValue = conf.get(DFS_NAMENODE_PLUGINS_KEY); LOG.error("Unable to load NameNode plugins. Specified list of plugins: " + pluginsValue, e); throw e; } for (ServicePlugin p: plugins) { try { p.start(this); } catch (Throwable t) { LOG.warn("ServicePlugin " + p + " could not be started", t); } } LOG.info(getRole() + " RPC up at: " + getNameNodeAddress()); if (rpcServer.getServiceRpcAddress() != null) { LOG.info(getRole() + " service RPC up at: " + rpcServer.getServiceRpcAddress()); } } private void stopCommonServices() { if(rpcServer != null) rpcServer.stop(); if(namesystem != null) namesystem.close(); if (pauseMonitor != null) pauseMonitor.stop(); if (plugins != null) { for (ServicePlugin p : plugins) { try { p.stop(); } catch (Throwable t) { LOG.warn("ServicePlugin " + p + " could not be stopped", t); } } } stopHttpServer(); } private void startTrashEmptier(final Configuration conf) throws IOException { long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT); if (trashInterval == 0) { return; } else if (trashInterval < 0) { throw new IOException("Cannot start trash emptier with negative interval." + " Set " + FS_TRASH_INTERVAL_KEY + " to a positive value."); } // This may be called from the transitionToActive code path, in which // case the current user is the administrator, not the NN. The trash // emptier needs to run as the NN. See HDFS-3972. FileSystem fs = SecurityUtil.doAsLoginUser( new PrivilegedExceptionAction<FileSystem>() { @Override public FileSystem run() throws IOException { return FileSystem.get(conf); } }); this.emptier = new Thread(new Trash(fs, conf).getEmptier(), "Trash Emptier"); this.emptier.setDaemon(true); this.emptier.start(); } private void stopTrashEmptier() { if (this.emptier != null) { emptier.interrupt(); emptier = null; } } private void startHttpServer(final Configuration conf) throws IOException { httpServer = new NameNodeHttpServer(conf, this, getHttpServerBindAddress(conf)); httpServer.start(); httpServer.setStartupProgress(startupProgress); } private void stopHttpServer() { try { if (httpServer != null) httpServer.stop(); } catch (Exception e) { LOG.error("Exception while stopping httpserver", e); } } /** * Start NameNode. 
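 * <p>Minimal embedded use, mirroring what {@link #main} does through
 * {@link #createNameNode}:
 * <pre>{@code
 * NameNode nn = new NameNode(new HdfsConfiguration());
 * nn.join();
 * }</pre>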
* <p> * The name-node can be started with one of the following startup options: * <ul> * <li>{@link StartupOption#REGULAR REGULAR} - normal name node startup</li> * <li>{@link StartupOption#FORMAT FORMAT} - format name node</li> * <li>{@link StartupOption#BACKUP BACKUP} - start backup node</li> * <li>{@link StartupOption#CHECKPOINT CHECKPOINT} - start checkpoint node</li> * <li>{@link StartupOption#UPGRADE UPGRADE} - start the cluster * upgrade and create a snapshot of the current file system state</li> * <li>{@link StartupOption#UPGRADEONLY UPGRADEONLY} - perform the cluster * upgrade and then shut down</li> * <li>{@link StartupOption#RECOVER RECOVERY} - recover name node * metadata</li> * <li>{@link StartupOption#ROLLBACK ROLLBACK} - roll the * cluster back to the previous state</li> * <li>{@link StartupOption#IMPORT IMPORT} - import checkpoint</li> * </ul> * The option is passed via configuration field: * <tt>dfs.namenode.startup</tt> * * The conf will be modified to reflect the actual ports on which * the NameNode is up and running if the user passes the port as * <code>zero</code> in the conf. * * @param conf configuration * @throws IOException */ public NameNode(Configuration conf) throws IOException { this(conf, NamenodeRole.NAMENODE); } protected NameNode(Configuration conf, NamenodeRole role) throws IOException { super(conf); this.tracer = new Tracer.Builder("NameNode"). conf(TraceUtils.wrapHadoopConf(NAMENODE_HTRACE_PREFIX, conf)). build(); this.tracerConfigurationManager = new TracerConfigurationManager(NAMENODE_HTRACE_PREFIX, conf); this.role = role; String nsId = getNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); clientNamenodeAddress = NameNodeUtils.getClientNamenodeAddress( conf, nsId); if (clientNamenodeAddress != null) { LOG.info("Clients should use {} to access" + " this namenode/service.", clientNamenodeAddress); } this.haEnabled = HAUtil.isHAEnabled(conf, nsId); state = createHAState(getStartupOption(conf)); this.allowStaleStandbyReads = HAUtil.shouldAllowStandbyReads(conf); this.haContext = createHAContext(); try { initializeGenericKeys(conf, nsId, namenodeId); initialize(getConf()); state.prepareToEnterState(haContext); try { haContext.writeLock(); state.enterState(haContext); } finally { haContext.writeUnlock(); } } catch (IOException e) { this.stopAtException(e); throw e; } catch (HadoopIllegalArgumentException e) { this.stopAtException(e); throw e; } notBecomeActiveInSafemode = conf.getBoolean( DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE, DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE_DEFAULT); this.started.set(true); } private void stopAtException(Exception e){ try { this.stop(); } catch (Exception ex) { LOG.warn("Encountered exception when handling exception (" + e.getMessage() + "):", ex); } } protected HAState createHAState(StartupOption startOpt) { if (!haEnabled || startOpt == StartupOption.UPGRADE || startOpt == StartupOption.UPGRADEONLY) { return ACTIVE_STATE; } else if (startOpt == StartupOption.OBSERVER) { return OBSERVER_STATE; } else { return STANDBY_STATE; } } protected HAContext createHAContext() { return new NameNodeHAContext(); } /** * Wait for service to finish. * (Normally, it runs forever.) */ public void join() { try { rpcServer.join(); } catch (InterruptedException ie) { LOG.info("Caught interrupted exception ", ie); } } /** * Stop all NameNode threads and wait for all to finish.
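 * Safe to call more than once; subsequent calls return immediately
 * because {@code stopRequested} latches on the first call.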
*/ public void stop() { synchronized(this) { if (stopRequested) return; stopRequested = true; } try { if (state != null) { state.exitState(haContext); } } catch (ServiceFailedException e) { LOG.warn("Encountered exception while exiting state ", e); } finally { stopMetricsLogger(); stopCommonServices(); if (metrics != null) { metrics.shutdown(); } if (namesystem != null) { namesystem.shutdown(); } if (nameNodeStatusBeanName != null) { MBeans.unregister(nameNodeStatusBeanName); nameNodeStatusBeanName = null; } if (levelDBAliasMapServer != null) { levelDBAliasMapServer.close(); } } tracer.close(); } synchronized boolean isStopRequested() { return stopRequested; } /** * Is the cluster currently in safe mode? */ public boolean isInSafeMode() { return namesystem.isInSafeMode(); } /** get FSImage */ @VisibleForTesting public FSImage getFSImage() { return namesystem.getFSImage(); } /** * @return NameNode RPC address */ public InetSocketAddress getNameNodeAddress() { return rpcServer.getRpcAddress(); } /** * @return The auxiliary nameNode RPC addresses, or empty set if there * is none. */ public Set<InetSocketAddress> getAuxiliaryNameNodeAddresses() { return rpcServer.getAuxiliaryRpcAddresses(); } /** * @return NameNode RPC address in "host:port" string form */ public String getNameNodeAddressHostPortString() { return NetUtils.getHostPortString(getNameNodeAddress()); } /** * Return a host:port format string corresponding to an auxiliary * port configured on the NameNode. If there are multiple auxiliary ports, * an arbitrary one is returned. If there is no auxiliary listener, returns * null. * * @return a string of format host:port that points to an auxiliary NameNode * address, or null if there is no such address. */ @VisibleForTesting public String getNNAuxiliaryRpcAddress() { Set<InetSocketAddress> auxiliaryAddrs = getAuxiliaryNameNodeAddresses(); if (auxiliaryAddrs.isEmpty()) { return null; } // since the set has no particular order, returning the first element // from the iterator is effectively arbitrary. InetSocketAddress addr = auxiliaryAddrs.iterator().next(); return NetUtils.getHostPortString(addr); } /** * @return NameNode service RPC address if configured, the * NameNode RPC address otherwise */ public InetSocketAddress getServiceRpcAddress() { final InetSocketAddress serviceAddr = rpcServer.getServiceRpcAddress(); return serviceAddr == null ? getNameNodeAddress() : serviceAddr; } /** * @return NameNode HTTP address, used by the Web UI, image transfer, * and HTTP-based file system clients like WebHDFS */ public InetSocketAddress getHttpAddress() { return httpServer.getHttpAddress(); } /** * @return NameNode HTTPS address, used by the Web UI, image transfer, * and HTTP-based file system clients like WebHDFS */ public InetSocketAddress getHttpsAddress() { return httpServer.getHttpsAddress(); } /** * Joins the NameNodeHttpServer; used by unit tests to ensure a full * shutdown, so that no bind exception is thrown during restart. */ @VisibleForTesting public void joinHttpServer() { if (httpServer != null) { try { httpServer.join(); } catch (InterruptedException e) { LOG.info("Caught InterruptedException joining NameNodeHttpServer", e); Thread.currentThread().interrupt(); } } } /** * Verify that configured directories exist, then * interactively confirm that formatting is desired * for each existing directory and format them.
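 * <p>Normally reached through the startup options (see {@link #USAGE}),
 * e.g. {@code hdfs namenode -format [-clusterid cid] [-force]
 * [-nonInteractive]}.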
* * @param conf configuration to use * @param force if true, format regardless of whether dirs exist * @return true if formatting was aborted, false otherwise * @throws IOException */ private static boolean format(Configuration conf, boolean force, boolean isInteractive) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); checkAllowFormat(conf); if (UserGroupInformation.isSecurityEnabled()) { InetSocketAddress socAddr = DFSUtilClient.getNNAddress(conf); SecurityUtil.login(conf, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); } Collection<URI> nameDirsToFormat = FSNamesystem.getNamespaceDirs(conf); List<URI> sharedDirs = FSNamesystem.getSharedEditsDirs(conf); List<URI> dirsToPrompt = new ArrayList<URI>(); dirsToPrompt.addAll(nameDirsToFormat); dirsToPrompt.addAll(sharedDirs); List<URI> editDirsToFormat = FSNamesystem.getNamespaceEditsDirs(conf); // if clusterID is not provided - see if you can find the current one String clusterId = StartupOption.FORMAT.getClusterId(); if(clusterId == null || clusterId.equals("")) { //Generate a new cluster id clusterId = NNStorage.newClusterID(); } System.out.println("Formatting using clusterid: " + clusterId); FSImage fsImage = new FSImage(conf, nameDirsToFormat, editDirsToFormat); try { FSNamesystem fsn = new FSNamesystem(conf, fsImage); fsImage.getEditLog().initJournalsForWrite(); // Abort NameNode format if reformat is disabled and if // meta-dir already exists if (conf.getBoolean(DFSConfigKeys.DFS_REFORMAT_DISABLED, DFSConfigKeys.DFS_REFORMAT_DISABLED_DEFAULT)) { force = false; isInteractive = false; for (StorageDirectory sd : fsImage.storage.dirIterable(null)) { if (sd.hasSomeData()) { throw new NameNodeFormatException( "NameNode format aborted as reformat is disabled for " + "this cluster."); } } } if (!fsImage.confirmFormat(force, isInteractive)) { return true; // aborted } fsImage.format(fsn, clusterId, force); } catch (IOException ioe) { LOG.warn("Encountered exception during format: ", ioe); fsImage.close(); throw ioe; } return false; } public static void checkAllowFormat(Configuration conf) throws IOException { if (!conf.getBoolean(DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY, DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_DEFAULT)) { throw new IOException("The option " + DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY + " is set to false for this filesystem, so it " + "cannot be formatted. You will need to set " + DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY +" parameter " + "to true in order to format this filesystem"); } } @VisibleForTesting public static boolean initializeSharedEdits(Configuration conf) throws IOException { return initializeSharedEdits(conf, true); } @VisibleForTesting public static boolean initializeSharedEdits(Configuration conf, boolean force) throws IOException { return initializeSharedEdits(conf, force, false); } /** * Clone the supplied configuration but remove the shared edits dirs. * * @param conf Supplies the original configuration. * @return Cloned configuration without the shared edit dirs. * @throws IOException on failure to generate the configuration. 
*/ private static Configuration getConfigurationWithoutSharedEdits( Configuration conf) throws IOException { List<URI> editsDirs = FSNamesystem.getNamespaceEditsDirs(conf, false); String editsDirsString = Joiner.on(",").join(editsDirs); Configuration confWithoutShared = new Configuration(conf); confWithoutShared.unset(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY); confWithoutShared.setStrings(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY, editsDirsString); return confWithoutShared; } /** * Format a new shared edits dir and copy in enough edit log segments so that * the standby NN can start up. * * @param conf configuration * @param force format regardless of whether or not the shared edits dir exists * @param interactive prompt the user when a dir exists * @return true if the command aborts, false otherwise */ private static boolean initializeSharedEdits(Configuration conf, boolean force, boolean interactive) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); if (conf.get(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY) == null) { LOG.error("No shared edits directory configured for namespace " + nsId + " namenode " + namenodeId); return false; } if (UserGroupInformation.isSecurityEnabled()) { InetSocketAddress socAddr = DFSUtilClient.getNNAddress(conf); SecurityUtil.login(conf, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); } NNStorage existingStorage = null; FSImage sharedEditsImage = null; try { FSNamesystem fsns = FSNamesystem.loadFromDisk(getConfigurationWithoutSharedEdits(conf)); existingStorage = fsns.getFSImage().getStorage(); NamespaceInfo nsInfo = existingStorage.getNamespaceInfo(); List<URI> sharedEditsDirs = FSNamesystem.getSharedEditsDirs(conf); sharedEditsImage = new FSImage(conf, Lists.<URI>newArrayList(), sharedEditsDirs); sharedEditsImage.getEditLog().initJournalsForWrite(); if (!sharedEditsImage.confirmFormat(force, interactive)) { return true; // abort } NNStorage newSharedStorage = sharedEditsImage.getStorage(); // Call Storage.format instead of FSImage.format here, since we don't // actually want to save a checkpoint - just prime the dirs with // the existing namespace info newSharedStorage.format(nsInfo); sharedEditsImage.getEditLog().formatNonFileJournals(nsInfo, force); // Need to make sure the edit log segments are in good shape to initialize // the shared edits dir. fsns.getFSImage().getEditLog().close(); fsns.getFSImage().getEditLog().initJournalsForWrite(); fsns.getFSImage().getEditLog().recoverUnclosedStreams(); copyEditLogSegmentsToSharedDir(fsns, sharedEditsDirs, newSharedStorage, conf); } catch (IOException ioe) { LOG.error("Could not initialize shared edits dir", ioe); return true; // aborted } finally { if (sharedEditsImage != null) { try { sharedEditsImage.close(); } catch (IOException ioe) { LOG.warn("Could not close sharedEditsImage", ioe); } } // Have to unlock storage explicitly for the case when we're running in a // unit test, which runs in the same JVM as NNs. 
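// Otherwise a NameNode started later in the same JVM could fail to
// lock the directories this process still holds.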
if (existingStorage != null) { try { existingStorage.unlockAll(); } catch (IOException ioe) { LOG.warn("Could not unlock storage directories", ioe); return true; // aborted } } } return false; // did not abort } private static void copyEditLogSegmentsToSharedDir(FSNamesystem fsns, Collection<URI> sharedEditsDirs, NNStorage newSharedStorage, Configuration conf) throws IOException { Preconditions.checkArgument(!sharedEditsDirs.isEmpty(), "No shared edits specified"); // Copy edit log segments into the new shared edits dir. List<URI> sharedEditsUris = new ArrayList<URI>(sharedEditsDirs); FSEditLog newSharedEditLog = new FSEditLog(conf, newSharedStorage, sharedEditsUris); newSharedEditLog.initJournalsForWrite(); newSharedEditLog.recoverUnclosedStreams(); FSEditLog sourceEditLog = fsns.getFSImage().editLog; long fromTxId = fsns.getFSImage().getMostRecentCheckpointTxId(); Collection<EditLogInputStream> streams = null; try { streams = sourceEditLog.selectInputStreams(fromTxId + 1, 0); // Set the nextTxid to the CheckpointTxId+1 newSharedEditLog.setNextTxId(fromTxId + 1); // Copy all edits after last CheckpointTxId to shared edits dir for (EditLogInputStream stream : streams) { LOG.debug("Beginning to copy stream " + stream + " to shared edits"); FSEditLogOp op; boolean segmentOpen = false; while ((op = stream.readOp()) != null) { if (LOG.isTraceEnabled()) { LOG.trace("copying op: " + op); } if (!segmentOpen) { newSharedEditLog.startLogSegment(op.txid, false, fsns.getEffectiveLayoutVersion()); segmentOpen = true; } newSharedEditLog.logEdit(op); if (op.opCode == FSEditLogOpCodes.OP_END_LOG_SEGMENT) { newSharedEditLog.endCurrentLogSegment(false); LOG.debug("ending log segment because of END_LOG_SEGMENT op in " + stream); segmentOpen = false; } } if (segmentOpen) { LOG.debug("ending log segment because of end of stream in " + stream); newSharedEditLog.logSync(); newSharedEditLog.endCurrentLogSegment(false); segmentOpen = false; } } } finally { if (streams != null) { FSEditLog.closeAllStreams(streams); } } } @VisibleForTesting public static boolean doRollback(Configuration conf, boolean isConfirmationNeeded) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); FSNamesystem nsys = new FSNamesystem(conf, new FSImage(conf)); System.err.print( "\"rollBack\" will remove the current state of the file system,\n" + "returning you to the state prior to initiating your recent\n" + "upgrade. This action is permanent and cannot be undone. If you\n" + "are performing a rollback in an HA environment, you should be\n" + "certain that no NameNode process is running on any host."); if (isConfirmationNeeded) { if (!confirmPrompt("Roll back file system state?")) { System.err.println("Rollback aborted."); return true; } } nsys.getFSImage().doRollback(nsys); return false; } private static void printUsage(PrintStream out) { out.println(USAGE + "\n"); } @VisibleForTesting static StartupOption parseArguments(String args[]) { int argsLen = (args == null) ? 
0 : args.length; StartupOption startOpt = StartupOption.REGULAR; for(int i=0; i < argsLen; i++) { String cmd = args[i]; if (StartupOption.FORMAT.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.FORMAT; for (i = i + 1; i < argsLen; i++) { if (args[i].equalsIgnoreCase(StartupOption.CLUSTERID.getName())) { i++; if (i >= argsLen) { // if no cluster id specified, return null LOG.error("Must specify a valid cluster ID after the " + StartupOption.CLUSTERID.getName() + " flag"); return null; } String clusterId = args[i]; // Make sure an id is specified and not another flag if (clusterId.isEmpty() || clusterId.equalsIgnoreCase(StartupOption.FORCE.getName()) || clusterId.equalsIgnoreCase( StartupOption.NONINTERACTIVE.getName())) { LOG.error("Must specify a valid cluster ID after the " + StartupOption.CLUSTERID.getName() + " flag"); return null; } startOpt.setClusterId(clusterId); } if (args[i].equalsIgnoreCase(StartupOption.FORCE.getName())) { startOpt.setForceFormat(true); } if (args[i].equalsIgnoreCase(StartupOption.NONINTERACTIVE.getName())) { startOpt.setInteractiveFormat(false); } } } else if (StartupOption.GENCLUSTERID.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.GENCLUSTERID; } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.REGULAR; } else if (StartupOption.BACKUP.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.BACKUP; } else if (StartupOption.CHECKPOINT.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.CHECKPOINT; } else if (StartupOption.OBSERVER.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.OBSERVER; } else if (StartupOption.UPGRADE.getName().equalsIgnoreCase(cmd) || StartupOption.UPGRADEONLY.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.UPGRADE.getName().equalsIgnoreCase(cmd) ? 
StartupOption.UPGRADE : StartupOption.UPGRADEONLY; /* Can be followed by CLUSTERID with a required parameter or * RENAMERESERVED with an optional parameter */ while (i + 1 < argsLen) { String flag = args[i + 1]; if (flag.equalsIgnoreCase(StartupOption.CLUSTERID.getName())) { if (i + 2 < argsLen) { i += 2; startOpt.setClusterId(args[i]); } else { LOG.error("Must specify a valid cluster ID after the " + StartupOption.CLUSTERID.getName() + " flag"); return null; } } else if (flag.equalsIgnoreCase(StartupOption.RENAMERESERVED .getName())) { if (i + 2 < argsLen) { FSImageFormat.setRenameReservedPairs(args[i + 2]); i += 2; } else { FSImageFormat.useDefaultRenameReservedPairs(); i += 1; } } else { LOG.error("Unknown upgrade flag " + flag); return null; } } } else if (StartupOption.ROLLINGUPGRADE.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.ROLLINGUPGRADE; ++i; if (i >= argsLen) { LOG.error("Must specify a rolling upgrade startup option " + RollingUpgradeStartupOption.getAllOptionString()); return null; } startOpt.setRollingUpgradeStartupOption(args[i]); } else if (StartupOption.ROLLBACK.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.ROLLBACK; } else if (StartupOption.IMPORT.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.IMPORT; } else if (StartupOption.BOOTSTRAPSTANDBY.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.BOOTSTRAPSTANDBY; return startOpt; } else if (StartupOption.INITIALIZESHAREDEDITS.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.INITIALIZESHAREDEDITS; for (i = i + 1 ; i < argsLen; i++) { if (StartupOption.NONINTERACTIVE.getName().equals(args[i])) { startOpt.setInteractiveFormat(false); } else if (StartupOption.FORCE.getName().equals(args[i])) { startOpt.setForceFormat(true); } else { LOG.error("Invalid argument: " + args[i]); return null; } } return startOpt; } else if (StartupOption.RECOVER.getName().equalsIgnoreCase(cmd)) { if (startOpt != StartupOption.REGULAR) { throw new RuntimeException("Can't combine -recover with " + "other startup options."); } startOpt = StartupOption.RECOVER; while (++i < argsLen) { if (args[i].equalsIgnoreCase( StartupOption.FORCE.getName())) { startOpt.setForce(MetaRecoveryContext.FORCE_FIRST_CHOICE); } else { throw new RuntimeException("Error parsing recovery options: " + "can't understand option \"" + args[i] + "\""); } } } else if (StartupOption.METADATAVERSION.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.METADATAVERSION; } else { return null; } } return startOpt; } private static void setStartupOption(Configuration conf, StartupOption opt) { conf.set(DFS_NAMENODE_STARTUP_KEY, opt.name()); } public static StartupOption getStartupOption(Configuration conf) { return StartupOption.valueOf(conf.get(DFS_NAMENODE_STARTUP_KEY, StartupOption.REGULAR.toString())); } private static void doRecovery(StartupOption startOpt, Configuration conf) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); if (startOpt.getForce() < MetaRecoveryContext.FORCE_ALL) { if (!confirmPrompt("You have selected Metadata Recovery mode. " + "This mode is intended to recover lost metadata on a corrupt " + "filesystem. Metadata recovery mode often permanently deletes " + "data from your HDFS filesystem. Please back up your edit log " + "and fsimage before trying this!\n\n" + "Are you ready to proceed? 
(Y/N)\n")) { System.err.println("Recovery aborted at user request.\n"); return; } } MetaRecoveryContext.LOG.info("starting recovery..."); UserGroupInformation.setConfiguration(conf); NameNode.initMetrics(conf, startOpt.toNodeRole()); FSNamesystem fsn = null; try { fsn = FSNamesystem.loadFromDisk(conf); fsn.getFSImage().saveNamespace(fsn); MetaRecoveryContext.LOG.info("RECOVERY COMPLETE"); } catch (IOException e) { MetaRecoveryContext.LOG.info("RECOVERY FAILED: caught exception", e); throw e; } catch (RuntimeException e) { MetaRecoveryContext.LOG.info("RECOVERY FAILED: caught exception", e); throw e; } finally { if (fsn != null) fsn.close(); } } /** * Verify that configured directories exist, then print the metadata versions * of the software and the image. * * @param conf configuration to use * @throws IOException */ private static boolean printMetadataVersion(Configuration conf) throws IOException { final String nsId = DFSUtil.getNamenodeNameServiceId(conf); final String namenodeId = HAUtil.getNameNodeId(conf, nsId); NameNode.initializeGenericKeys(conf, nsId, namenodeId); final FSImage fsImage = new FSImage(conf); final FSNamesystem fs = new FSNamesystem(conf, fsImage, false); return fsImage.recoverTransitionRead( StartupOption.METADATAVERSION, fs, null); } public static NameNode createNameNode(String argv[], Configuration conf) throws IOException { LOG.info("createNameNode " + Arrays.asList(argv)); if (conf == null) conf = new HdfsConfiguration(); // Parse out some generic args into Configuration. GenericOptionsParser hParser = new GenericOptionsParser(conf, argv); argv = hParser.getRemainingArgs(); // Parse the rest, NN specific args. StartupOption startOpt = parseArguments(argv); if (startOpt == null) { printUsage(System.err); return null; } setStartupOption(conf, startOpt); boolean aborted = false; switch (startOpt) { case FORMAT: aborted = format(conf, startOpt.getForceFormat(), startOpt.getInteractiveFormat()); terminate(aborted ? 1 : 0); return null; // avoid javac warning case GENCLUSTERID: System.err.println("Generating new cluster id:"); System.out.println(NNStorage.newClusterID()); terminate(0); return null; case ROLLBACK: aborted = doRollback(conf, true); terminate(aborted ? 1 : 0); return null; // avoid warning case BOOTSTRAPSTANDBY: String[] toolArgs = Arrays.copyOfRange(argv, 1, argv.length); int rc = BootstrapStandby.run(toolArgs, conf); terminate(rc); return null; // avoid warning case INITIALIZESHAREDEDITS: aborted = initializeSharedEdits(conf, startOpt.getForceFormat(), startOpt.getInteractiveFormat()); terminate(aborted ? 1 : 0); return null; // avoid warning case BACKUP: case CHECKPOINT: NamenodeRole role = startOpt.toNodeRole(); DefaultMetricsSystem.initialize(role.toString().replace(" ", "")); return new BackupNode(conf, role); case RECOVER: NameNode.doRecovery(startOpt, conf); return null; case METADATAVERSION: printMetadataVersion(conf); terminate(0); return null; // avoid javac warning case UPGRADEONLY: DefaultMetricsSystem.initialize("NameNode"); new NameNode(conf); terminate(0); return null; default: DefaultMetricsSystem.initialize("NameNode"); return new NameNode(conf); } } /** * In a federated deployment, configuration is set for a set of * namenodes and secondary namenode/backup/checkpointer nodes, which are * grouped under a logical nameservice ID. The configuration keys specific * to them have a suffix set to the configured nameserviceId. * * This method copies the value from the specific key of the form * key.nameserviceId to the generic key, to set up the generic configuration. 
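 * (For example, a value found under the hypothetical key
 * {@code dfs.namenode.rpc-address.ns1.nn1} would be copied to
 * {@code dfs.namenode.rpc-address}.)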
Once this is done, only * the generic version of the configuration is read in the rest of the code, * for backward compatibility and simpler code changes. * * @param conf * Configuration object to look up the specific key in and to set the * value on the generic key. Note the conf object is modified. * @param nameserviceId name service Id (to distinguish federated NNs) * @param namenodeId the namenode ID (to distinguish HA NNs) * @see DFSUtil#setGenericConf(Configuration, String, String, String...) */ public static void initializeGenericKeys(Configuration conf, String nameserviceId, String namenodeId) { if ((nameserviceId != null && !nameserviceId.isEmpty()) || (namenodeId != null && !namenodeId.isEmpty())) { if (nameserviceId != null) { conf.set(DFS_NAMESERVICE_ID, nameserviceId); } if (namenodeId != null) { conf.set(DFS_HA_NAMENODE_ID_KEY, namenodeId); } DFSUtil.setGenericConf(conf, nameserviceId, namenodeId, NAMENODE_SPECIFIC_KEYS); DFSUtil.setGenericConf(conf, nameserviceId, null, NAMESERVICE_SPECIFIC_KEYS); } // If the RPC address is set use it to (re-)configure the default FS if (conf.get(DFS_NAMENODE_RPC_ADDRESS_KEY) != null) { URI defaultUri = URI.create(HdfsConstants.HDFS_URI_SCHEME + "://" + conf.get(DFS_NAMENODE_RPC_ADDRESS_KEY)); conf.set(FS_DEFAULT_NAME_KEY, defaultUri.toString()); if (LOG.isDebugEnabled()) { LOG.debug("Setting " + FS_DEFAULT_NAME_KEY + " to " + defaultUri.toString()); } } } /** * Get the name service Id for the node * @return name service Id or null if federation is not configured */ protected String getNameServiceId(Configuration conf) { return DFSUtil.getNamenodeNameServiceId(conf); } /** Command-line entry point for the NameNode. */ public static void main(String argv[]) throws Exception { if (DFSUtil.parseHelpArgument(argv, NameNode.USAGE, System.out, true)) { System.exit(0); } try { StringUtils.startupShutdownMessage(NameNode.class, argv, LOG); NameNode namenode = createNameNode(argv, null); if (namenode != null) { namenode.join(); } } catch (Throwable e) { LOG.error("Failed to start namenode.", e); terminate(1, e); } } synchronized void monitorHealth() throws HealthCheckFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { return; // no-op, if HA is not enabled } long start = Time.monotonicNow(); getNamesystem().checkAvailableResources(); long end = Time.monotonicNow(); if (end - start >= HEALTH_MONITOR_WARN_THRESHOLD_MS) { // log a warning if it takes >= 5 seconds. 
LOG.warn("Remote IP {} checking available resources took {}ms", Server.getRemoteIp(), end - start); } if (!getNamesystem().nameNodeHasResourcesAvailable()) { throw new HealthCheckFailedException( "The NameNode has no resources available"); } if (notBecomeActiveInSafemode && isInSafeMode()) { throw new HealthCheckFailedException("The NameNode is configured to " + "report UNHEALTHY to ZKFC in Safemode."); } } synchronized void transitionToActive() throws ServiceFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } if (state == OBSERVER_STATE) { throw new ServiceFailedException( "Cannot transition from '" + OBSERVER_STATE + "' to '" + ACTIVE_STATE + "'"); } if (notBecomeActiveInSafemode && isInSafeMode()) { throw new ServiceFailedException(getRole() + " still not leave safemode"); } state.setState(haContext, ACTIVE_STATE); } synchronized void transitionToStandby() throws ServiceFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } state.setState(haContext, STANDBY_STATE); } synchronized void transitionToObserver() throws ServiceFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } // Transition from ACTIVE to OBSERVER is forbidden. if (state == ACTIVE_STATE) { throw new ServiceFailedException( "Cannot transition from '" + ACTIVE_STATE + "' to '" + OBSERVER_STATE + "'"); } state.setState(haContext, OBSERVER_STATE); } synchronized HAServiceStatus getServiceStatus() throws ServiceFailedException, AccessControlException { if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } if (state == null) { return new HAServiceStatus(HAServiceState.INITIALIZING); } HAServiceState retState = state.getServiceState(); HAServiceStatus ret = new HAServiceStatus(retState); if (retState == HAServiceState.STANDBY) { if (namesystem.isInSafeMode()) { ret.setNotReadyToBecomeActive("The NameNode is in safemode. 
" + namesystem.getSafeModeTip()); } else { ret.setReadyToBecomeActive(); } } else if (retState == HAServiceState.ACTIVE) { ret.setReadyToBecomeActive(); } else { ret.setNotReadyToBecomeActive("State is " + state); } return ret; } synchronized HAServiceState getServiceState() { if (state == null) { return HAServiceState.INITIALIZING; } return state.getServiceState(); } /** * Register NameNodeStatusMXBean */ private void registerNNSMXBean() { nameNodeStatusBeanName = MBeans.register("NameNode", "NameNodeStatus", this); } @Override // NameNodeStatusMXBean public String getNNRole() { String roleStr = ""; NamenodeRole role = getRole(); if (null != role) { roleStr = role.toString(); } return roleStr; } @Override // NameNodeStatusMXBean public String getState() { String servStateStr = ""; HAServiceState servState = getServiceState(); if (null != servState) { servStateStr = servState.toString(); } return servStateStr; } @Override // NameNodeStatusMXBean public String getHostAndPort() { return getNameNodeAddressHostPortString(); } @Override // NameNodeStatusMXBean public boolean isSecurityEnabled() { return UserGroupInformation.isSecurityEnabled(); } @Override // NameNodeStatusMXBean public long getLastHATransitionTime() { return state.getLastHATransitionTime(); } @Override //NameNodeStatusMXBean public long getBytesWithFutureGenerationStamps() { return getNamesystem().getBytesInFuture(); } @Override public String getSlowPeersReport() { return namesystem.getBlockManager().getDatanodeManager() .getSlowPeersReport(); } @Override //NameNodeStatusMXBean public String getSlowDisksReport() { return namesystem.getBlockManager().getDatanodeManager() .getSlowDisksReport(); } /** * Shutdown the NN immediately in an ungraceful way. Used when it would be * unsafe for the NN to continue operating, e.g. during a failed HA state * transition. * * @param t exception which warrants the shutdown. Printed to the NN log * before exit. * @throws ExitException thrown only for testing. */ protected synchronized void doImmediateShutdown(Throwable t) throws ExitException { String message = "Error encountered requiring NN shutdown. " + "Shutting down immediately."; try { LOG.error(message, t); } catch (Throwable ignored) { // This is unlikely to happen, but there's nothing we can do if it does. 
} terminate(1, t); } /** * Class used to expose {@link NameNode} as context to {@link HAState} */ protected class NameNodeHAContext implements HAContext { @Override public void setState(HAState s) { state = s; } @Override public HAState getState() { return state; } @Override public void startActiveServices() throws IOException { try { namesystem.startActiveServices(); startTrashEmptier(getConf()); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void stopActiveServices() throws IOException { try { if (namesystem != null) { namesystem.stopActiveServices(); } stopTrashEmptier(); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void startStandbyServices() throws IOException { try { namesystem.startStandbyServices(getConf(), state == NameNode.OBSERVER_STATE); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void prepareToStopStandbyServices() throws ServiceFailedException { try { namesystem.prepareToStopStandbyServices(); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void stopStandbyServices() throws IOException { try { if (namesystem != null) { namesystem.stopStandbyServices(); } } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void writeLock() { namesystem.writeLock(); namesystem.lockRetryCache(); } @Override public void writeUnlock() { namesystem.unlockRetryCache(); namesystem.writeUnlock(); } /** Check if an operation of given category is allowed */ @Override public void checkOperation(final OperationCategory op) throws StandbyException { state.checkOperation(haContext, op); } @Override public boolean allowStaleReads() { if (state == OBSERVER_STATE) { return true; } return allowStaleStandbyReads; } } public boolean isStandbyState() { return (state.equals(STANDBY_STATE)); } public boolean isActiveState() { return (state.equals(ACTIVE_STATE)); } public boolean isObserverState() { return state.equals(OBSERVER_STATE); } /** * Returns whether the NameNode is completely started */ boolean isStarted() { return this.started.get(); } /** * Check that a request to change this node's HA state is valid. * In particular, verifies that, if auto failover is enabled, non-forced * requests from the HAAdmin CLI are rejected, and vice versa. 
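 * <p>For example, with automatic failover enabled a plain
 * {@code hdfs haadmin -transitionToActive nn1} is rejected, while
 * {@code hdfs haadmin -transitionToActive --forcemanual nn1} is allowed
 * with a warning ({@code nn1} is an illustrative service id).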
   *
   * @param req the request to check
   * @throws AccessControlException if the request is disallowed
   */
  void checkHaStateChange(StateChangeRequestInfo req)
      throws AccessControlException {
    boolean autoHaEnabled = getConf().getBoolean(
        DFS_HA_AUTO_FAILOVER_ENABLED_KEY,
        DFS_HA_AUTO_FAILOVER_ENABLED_DEFAULT);
    switch (req.getSource()) {
    case REQUEST_BY_USER:
      if (autoHaEnabled) {
        throw new AccessControlException(
            "Manual HA control for this NameNode is disallowed, because " +
            "automatic HA is enabled.");
      }
      break;
    case REQUEST_BY_USER_FORCED:
      if (autoHaEnabled) {
        LOG.warn("Allowing manual HA control from " +
            Server.getRemoteAddress() +
            " even though automatic HA is enabled, because the user " +
            "specified the force flag");
      }
      break;
    case REQUEST_BY_ZKFC:
      if (!autoHaEnabled) {
        throw new AccessControlException(
            "Request from ZK failover controller at " +
            Server.getRemoteAddress() + " denied since automatic HA " +
            "is not enabled");
      }
      break;
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override // ReconfigurableBase
  public Collection<String> getReconfigurableProperties() {
    return reconfigurableProperties;
  }

  /**
   * {@inheritDoc}
   */
  @Override // ReconfigurableBase
  protected String reconfigurePropertyImpl(String property, String newVal)
      throws ReconfigurationException {
    final DatanodeManager datanodeManager = namesystem.getBlockManager()
        .getDatanodeManager();

    if (property.equals(DFS_HEARTBEAT_INTERVAL_KEY)) {
      return reconfHeartbeatInterval(datanodeManager, property, newVal);
    } else if (property.equals(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY)) {
      return reconfHeartbeatRecheckInterval(datanodeManager, property,
          newVal);
    } else if (property.equals(FS_PROTECTED_DIRECTORIES)) {
      return reconfProtectedDirectories(newVal);
    } else if (property.equals(HADOOP_CALLER_CONTEXT_ENABLED_KEY)) {
      return reconfCallerContextEnabled(newVal);
    } else if (property.equals(ipcClientRPCBackoffEnable)) {
      return reconfigureIPCBackoffEnabled(newVal);
    } else if (property.equals(DFS_STORAGE_POLICY_SATISFIER_MODE_KEY)) {
      return reconfigureSPSModeEvent(newVal, property);
    } else if (property.equals(DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY)
        || property.equals(DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY)
        || property.equals(
            DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION)) {
      return reconfReplicationParameters(newVal, property);
    } else {
      throw new ReconfigurationException(property, newVal,
          getConf().get(property));
    }
  }

  private String reconfReplicationParameters(final String newVal,
      final String property) throws ReconfigurationException {
    BlockManager bm = namesystem.getBlockManager();
    int newSetting;
    namesystem.writeLock();
    try {
      if (property.equals(DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY)) {
        bm.setMaxReplicationStreams(
            adjustNewVal(DFS_NAMENODE_REPLICATION_MAX_STREAMS_DEFAULT,
                newVal));
        newSetting = bm.getMaxReplicationStreams();
      } else if (property.equals(
          DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY)) {
        bm.setReplicationStreamsHardLimit(
            adjustNewVal(DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_DEFAULT,
                newVal));
        newSetting = bm.getReplicationStreamsHardLimit();
      } else if (property.equals(
          DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION)) {
        bm.setBlocksReplWorkMultiplier(
            adjustNewVal(
                DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION_DEFAULT,
                newVal));
        newSetting = bm.getBlocksReplWorkMultiplier();
      } else {
        throw new IllegalArgumentException("Unexpected property " +
            property + " in reconfReplicationParameters");
      }
      LOG.info("RECONFIGURE* changed {} to {}", property, newSetting);
      return String.valueOf(newSetting);
    } catch
(IllegalArgumentException e) { throw new ReconfigurationException(property, newVal, getConf().get( property), e); } finally { namesystem.writeUnlock(); } } private int adjustNewVal(int defaultVal, String newVal) { if (newVal == null) { return defaultVal; } else { return Integer.parseInt(newVal); } } private String reconfHeartbeatInterval(final DatanodeManager datanodeManager, final String property, final String newVal) throws ReconfigurationException { namesystem.writeLock(); try { if (newVal == null) { // set to default datanodeManager.setHeartbeatInterval(DFS_HEARTBEAT_INTERVAL_DEFAULT); return String.valueOf(DFS_HEARTBEAT_INTERVAL_DEFAULT); } else { long newInterval = getConf() .getTimeDurationHelper(DFS_HEARTBEAT_INTERVAL_KEY, newVal, TimeUnit.SECONDS); datanodeManager.setHeartbeatInterval(newInterval); return String.valueOf(datanodeManager.getHeartbeatInterval()); } } catch (NumberFormatException nfe) { throw new ReconfigurationException(property, newVal, getConf().get( property), nfe); } finally { namesystem.writeUnlock(); LOG.info("RECONFIGURE* changed heartbeatInterval to " + datanodeManager.getHeartbeatInterval()); } } private String reconfHeartbeatRecheckInterval( final DatanodeManager datanodeManager, final String property, final String newVal) throws ReconfigurationException { namesystem.writeLock(); try { if (newVal == null) { // set to default datanodeManager.setHeartbeatRecheckInterval( DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT); return String.valueOf(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT); } else { datanodeManager.setHeartbeatRecheckInterval(Integer.parseInt(newVal)); return String.valueOf(datanodeManager.getHeartbeatRecheckInterval()); } } catch (NumberFormatException nfe) { throw new ReconfigurationException(property, newVal, getConf().get( property), nfe); } finally { namesystem.writeUnlock(); LOG.info("RECONFIGURE* changed heartbeatRecheckInterval to " + datanodeManager.getHeartbeatRecheckInterval()); } } private String reconfProtectedDirectories(String newVal) { return getNamesystem().getFSDirectory().setProtectedDirectories(newVal); } private String reconfCallerContextEnabled(String newVal) { Boolean callerContextEnabled; if (newVal == null) { callerContextEnabled = HADOOP_CALLER_CONTEXT_ENABLED_DEFAULT; } else { callerContextEnabled = Boolean.parseBoolean(newVal); } namesystem.setCallerContextEnabled(callerContextEnabled); return Boolean.toString(callerContextEnabled); } String reconfigureIPCBackoffEnabled(String newVal) { boolean clientBackoffEnabled; if (newVal == null) { clientBackoffEnabled = IPC_BACKOFF_ENABLE_DEFAULT; } else { clientBackoffEnabled = Boolean.parseBoolean(newVal); } rpcServer.getClientRpcServer() .setClientBackoffEnabled(clientBackoffEnabled); return Boolean.toString(clientBackoffEnabled); } String reconfigureSPSModeEvent(String newVal, String property) throws ReconfigurationException { if (newVal == null || StoragePolicySatisfierMode.fromString(newVal) == null) { throw new ReconfigurationException(property, newVal, getConf().get(property), new HadoopIllegalArgumentException( "For enabling or disabling storage policy satisfier, must " + "pass either internal/external/none string value only")); } if (!isActiveState()) { throw new ReconfigurationException(property, newVal, getConf().get(property), new HadoopIllegalArgumentException( "Enabling or disabling storage policy satisfier service on " + state + " NameNode is not allowed")); } StoragePolicySatisfierMode mode = StoragePolicySatisfierMode .fromString(newVal); if (mode == 
StoragePolicySatisfierMode.NONE) { // disabling sps service if (namesystem.getBlockManager().getSPSManager() != null) { namesystem.getBlockManager().getSPSManager().changeModeEvent(mode); namesystem.getBlockManager().disableSPS(); } } else { // enabling sps service boolean spsCreated = (namesystem.getBlockManager() .getSPSManager() != null); if (!spsCreated) { spsCreated = namesystem.getBlockManager().createSPSManager(getConf(), newVal); } if (spsCreated) { namesystem.getBlockManager().getSPSManager().changeModeEvent(mode); } } return newVal; } @Override // ReconfigurableBase protected Configuration getNewConf() { return new HdfsConfiguration(); } }
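  // A minimal operator-side sketch (not part of the upstream class) of how
  // the reconfiguration entry points above are typically exercised; the
  // host:port value is illustrative:
  //
  //   hdfs dfsadmin -reconfig namenode nn1.example.com:8020 start
  //   hdfs dfsadmin -reconfig namenode nn1.example.com:8020 status
  //
  // Only keys present in reconfigurableProperties (plus the per-port
  // ipc.<port>.backoff.enable key registered via
  // initReconfigurableBackoffKey) are accepted; any other key yields a
  // ReconfigurationException.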
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.ReconfigurableBase; import org.apache.hadoop.conf.ReconfigurationException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Trash; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo; import org.apache.hadoop.ha.HAServiceStatus; import org.apache.hadoop.ha.HealthCheckFailedException; import org.apache.hadoop.ha.ServiceFailedException; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants.StoragePolicySatisfierMode; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMap; import org.apache.hadoop.hdfs.server.aliasmap.InMemoryLevelDBAliasMapServer; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.RollingUpgradeStartupOption; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.common.MetricsLoggerTask; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.common.TokenVerifier; import org.apache.hadoop.hdfs.server.namenode.ha.ActiveState; import org.apache.hadoop.hdfs.server.namenode.ha.BootstrapStandby; import org.apache.hadoop.hdfs.server.namenode.ha.HAContext; import org.apache.hadoop.hdfs.server.namenode.ha.HAState; import org.apache.hadoop.hdfs.server.namenode.ha.StandbyState; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics; import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress; import 
org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressMetrics; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.JournalProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.ipc.ExternalCall; import org.apache.hadoop.ipc.RefreshCallQueueProtocol; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.StandbyException; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.RefreshUserMappingsProtocol; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol; import org.apache.hadoop.tools.GetUserMappingsProtocol; import org.apache.hadoop.tracing.TraceAdminProtocol; import org.apache.hadoop.tracing.TraceUtils; import org.apache.hadoop.tracing.TracerConfigurationManager; import org.apache.hadoop.util.ExitUtil.ExitException; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.JvmPauseMonitor; import org.apache.hadoop.util.ServicePlugin; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.htrace.core.Tracer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.ObjectName; import java.io.IOException; import java.io.PrintStream; import java.net.InetSocketAddress; import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.TreeSet; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE_DEFAULT; import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_ENABLED_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_ENABLED_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_AUTO_FAILOVER_ENABLED_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_AUTO_FAILOVER_ENABLED_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_ZKFC_PORT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_PLUGINS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_STARTUP_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICE_ID; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.FS_PROTECTED_DIRECTORIES; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_STORAGE_POLICY_SATISFIER_MODE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY; import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_DEFAULT; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION_DEFAULT; import static org.apache.hadoop.util.ExitUtil.terminate; import static org.apache.hadoop.util.ToolRunner.confirmPrompt; import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_BACKOFF_ENABLE; import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_NAMESPACE; import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_BACKOFF_ENABLE_DEFAULT; /********************************************************** * NameNode serves as both directory namespace manager and * "inode table" for the Hadoop DFS. There is a single NameNode * running in any DFS deployment. (Well, except when there * is a second backup/failover NameNode, or when using federated NameNodes.) * * The NameNode controls two critical tables: * 1) filename{@literal ->}blocksequence (namespace) * 2) block{@literal ->}machinelist ("inodes") * * The first table is stored on disk and is very precious. * The second table is rebuilt every time the NameNode comes up. * * 'NameNode' refers to both this class as well as the 'NameNode server'. * The 'FSNamesystem' class actually performs most of the filesystem * management. The majority of the 'NameNode' class itself is concerned * with exposing the IPC interface and the HTTP server to the outside world, * plus some configuration management. * * NameNode implements the * {@link org.apache.hadoop.hdfs.protocol.ClientProtocol} interface, which * allows clients to ask for DFS services. * {@link org.apache.hadoop.hdfs.protocol.ClientProtocol} is not designed for * direct use by authors of DFS client code. End-users should instead use the * {@link org.apache.hadoop.fs.FileSystem} class. * * NameNode also implements the * {@link org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol} interface, * used by DataNodes that actually store DFS data blocks. These * methods are invoked repeatedly and automatically by all the * DataNodes in a DFS deployment. * * NameNode also implements the * {@link org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol} interface, * used by secondary namenodes or rebalancing processes to get partial * NameNode state, for example partial blocksMap etc. **********************************************************/ @InterfaceAudience.Private public class NameNode extends ReconfigurableBase implements NameNodeStatusMXBean, TokenVerifier<DelegationTokenIdentifier> { static{ HdfsConfiguration.init(); } private InMemoryLevelDBAliasMapServer levelDBAliasMapServer; /** * Categories of operations supported by the namenode. */ public enum OperationCategory { /** Operations that are state agnostic */ UNCHECKED, /** Read operation that does not change the namespace state */ READ, /** Write operation that changes the namespace state */ WRITE, /** Operations related to checkpointing */ CHECKPOINT, /** Operations related to {@link JournalProtocol} */ JOURNAL } /** * HDFS configuration can have three types of parameters: * <ol> * <li>Parameters that are common for all the name services in the cluster.</li> * <li>Parameters that are specific to a name service. 
   * These keys are suffixed with nameserviceId in the configuration.
   * For example, "dfs.namenode.rpc-address.nameservice1".</li>
   * <li>Parameters that are specific to a single name node. These keys
   * are suffixed with nameserviceId and namenodeId in the configuration.
   * For example, "dfs.namenode.rpc-address.nameservice1.namenode1".</li>
   * </ol>
   *
   * In the latter two cases, operators may specify the configuration
   * without any suffix, with a nameservice suffix, or with a nameservice
   * and namenode suffix. The most specific suffix takes precedence.
   *
   * The keys below are specific to a given namenode, and thus may be
   * configured globally, for a nameservice, or for a specific namenode
   * within a nameservice.
   */
  public static final String[] NAMENODE_SPECIFIC_KEYS = {
      DFS_NAMENODE_RPC_ADDRESS_KEY,
      DFS_NAMENODE_RPC_BIND_HOST_KEY,
      DFS_NAMENODE_NAME_DIR_KEY,
      DFS_NAMENODE_EDITS_DIR_KEY,
      DFS_NAMENODE_SHARED_EDITS_DIR_KEY,
      DFS_NAMENODE_CHECKPOINT_DIR_KEY,
      DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY,
      DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY,
      DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY,
      DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
      DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY,
      DFS_NAMENODE_HTTP_ADDRESS_KEY,
      DFS_NAMENODE_HTTPS_ADDRESS_KEY,
      DFS_NAMENODE_HTTP_BIND_HOST_KEY,
      DFS_NAMENODE_HTTPS_BIND_HOST_KEY,
      DFS_NAMENODE_KEYTAB_FILE_KEY,
      DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
      DFS_NAMENODE_SECONDARY_HTTPS_ADDRESS_KEY,
      DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
      DFS_NAMENODE_BACKUP_ADDRESS_KEY,
      DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
      DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY,
      DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
      DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
      DFS_HA_FENCE_METHODS_KEY,
      DFS_HA_ZKFC_PORT_KEY,
  };

  /**
   * @see #NAMENODE_SPECIFIC_KEYS
   * These keys are specific to a nameservice, but may not be overridden
   * for a specific namenode.
   */
  public static final String[] NAMESERVICE_SPECIFIC_KEYS = {
      DFS_HA_AUTO_FAILOVER_ENABLED_KEY
  };

  private String ipcClientRPCBackoffEnable;

  /** A list of properties that are reconfigurable at runtime.
*/ private final TreeSet<String> reconfigurableProperties = Sets .newTreeSet(Lists.newArrayList( DFS_HEARTBEAT_INTERVAL_KEY, DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, FS_PROTECTED_DIRECTORIES, HADOOP_CALLER_CONTEXT_ENABLED_KEY, DFS_STORAGE_POLICY_SATISFIER_MODE_KEY, DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY, DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION)); private static final String USAGE = "Usage: hdfs namenode [" + StartupOption.BACKUP.getName() + "] | \n\t[" + StartupOption.CHECKPOINT.getName() + "] | \n\t[" + StartupOption.FORMAT.getName() + " [" + StartupOption.CLUSTERID.getName() + " cid ] [" + StartupOption.FORCE.getName() + "] [" + StartupOption.NONINTERACTIVE.getName() + "] ] | \n\t[" + StartupOption.UPGRADE.getName() + " [" + StartupOption.CLUSTERID.getName() + " cid]" + " [" + StartupOption.RENAMERESERVED.getName() + "<k-v pairs>] ] | \n\t[" + StartupOption.UPGRADEONLY.getName() + " [" + StartupOption.CLUSTERID.getName() + " cid]" + " [" + StartupOption.RENAMERESERVED.getName() + "<k-v pairs>] ] | \n\t[" + StartupOption.ROLLBACK.getName() + "] | \n\t[" + StartupOption.ROLLINGUPGRADE.getName() + " " + RollingUpgradeStartupOption.getAllOptionString() + " ] | \n\t[" + StartupOption.IMPORT.getName() + "] | \n\t[" + StartupOption.INITIALIZESHAREDEDITS.getName() + "] | \n\t[" + StartupOption.BOOTSTRAPSTANDBY.getName() + " [" + StartupOption.FORCE.getName() + "] [" + StartupOption.NONINTERACTIVE.getName() + "] [" + StartupOption.SKIPSHAREDEDITSCHECK.getName() + "] ] | \n\t[" + StartupOption.RECOVER.getName() + " [ " + StartupOption.FORCE.getName() + "] ] | \n\t[" + StartupOption.METADATAVERSION.getName() + " ]"; public long getProtocolVersion(String protocol, long clientVersion) throws IOException { if (protocol.equals(ClientProtocol.class.getName())) { return ClientProtocol.versionID; } else if (protocol.equals(DatanodeProtocol.class.getName())){ return DatanodeProtocol.versionID; } else if (protocol.equals(NamenodeProtocol.class.getName())){ return NamenodeProtocol.versionID; } else if (protocol.equals(RefreshAuthorizationPolicyProtocol.class.getName())){ return RefreshAuthorizationPolicyProtocol.versionID; } else if (protocol.equals(RefreshUserMappingsProtocol.class.getName())){ return RefreshUserMappingsProtocol.versionID; } else if (protocol.equals(RefreshCallQueueProtocol.class.getName())) { return RefreshCallQueueProtocol.versionID; } else if (protocol.equals(GetUserMappingsProtocol.class.getName())){ return GetUserMappingsProtocol.versionID; } else if (protocol.equals(TraceAdminProtocol.class.getName())){ return TraceAdminProtocol.versionID; } else { throw new IOException("Unknown protocol to name node: " + protocol); } } /** * @deprecated Use {@link HdfsClientConfigKeys#DFS_NAMENODE_RPC_PORT_DEFAULT} * instead. 
*/ @Deprecated public static final int DEFAULT_PORT = DFS_NAMENODE_RPC_PORT_DEFAULT; public static final Logger LOG = LoggerFactory.getLogger(NameNode.class.getName()); public static final Logger stateChangeLog = LoggerFactory.getLogger("org.apache.hadoop.hdfs.StateChange"); public static final Logger blockStateChangeLog = LoggerFactory.getLogger("BlockStateChange"); public static final HAState ACTIVE_STATE = new ActiveState(); public static final HAState STANDBY_STATE = new StandbyState(); public static final HAState OBSERVER_STATE = new StandbyState(true); private static final String NAMENODE_HTRACE_PREFIX = "namenode.htrace."; public static final Log MetricsLog = LogFactory.getLog("NameNodeMetricsLog"); protected FSNamesystem namesystem; protected final NamenodeRole role; private volatile HAState state; private final boolean haEnabled; private final HAContext haContext; protected final boolean allowStaleStandbyReads; private AtomicBoolean started = new AtomicBoolean(false); private final boolean notBecomeActiveInSafemode; private final static int HEALTH_MONITOR_WARN_THRESHOLD_MS = 5000; /** httpServer */ protected NameNodeHttpServer httpServer; private Thread emptier; /** only used for testing purposes */ protected boolean stopRequested = false; /** Registration information of this name-node */ protected NamenodeRegistration nodeRegistration; /** Activated plug-ins. */ private List<ServicePlugin> plugins; private NameNodeRpcServer rpcServer; private JvmPauseMonitor pauseMonitor; private ObjectName nameNodeStatusBeanName; protected final Tracer tracer; protected final TracerConfigurationManager tracerConfigurationManager; ScheduledThreadPoolExecutor metricsLoggerTimer; /** * The namenode address that clients will use to access this namenode * or the name service. For HA configurations using logical URI, it * will be the logical address. */ private String clientNamenodeAddress; /** Format a new filesystem. Destroys any filesystem that may already * exist at this location. **/ public static void format(Configuration conf) throws IOException { format(conf, true, true); } static NameNodeMetrics metrics; private static final StartupProgress startupProgress = new StartupProgress(); /** Return the {@link FSNamesystem} object. * @return {@link FSNamesystem} object. */ public FSNamesystem getNamesystem() { return namesystem; } public NamenodeProtocols getRpcServer() { return rpcServer; } @VisibleForTesting public HttpServer2 getHttpServer() { return httpServer.getHttpServer(); } public void queueExternalCall(ExternalCall<?> extCall) throws IOException, InterruptedException { if (rpcServer == null) { throw new RetriableException("Namenode is in startup mode"); } rpcServer.getClientRpcServer().queueCall(extCall); } public static void initMetrics(Configuration conf, NamenodeRole role) { metrics = NameNodeMetrics.create(conf, role); } public static NameNodeMetrics getNameNodeMetrics() { return metrics; } /** * Returns object used for reporting namenode startup progress. * * @return StartupProgress for reporting namenode startup progress */ public static StartupProgress getStartupProgress() { return startupProgress; } /** * Return the service name of the issued delegation token. * * @return The name service id in HA-mode, or the rpc address in non-HA mode */ public String getTokenServiceName() { return getClientNamenodeAddress(); } /** * Get the namenode address to be used by clients. 
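   * <p>With HA configured over a logical URI this is the logical authority
   * (for example {@code mycluster}, an illustrative nameservice id);
   * otherwise it is the RPC host:port of this namenode.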
   * @return nn address
   */
  public String getClientNamenodeAddress() {
    return clientNamenodeAddress;
  }

  /**
   * Set the configuration property for the service RPC address to the
   * given address.
   */
  public static void setServiceAddress(Configuration conf, String address) {
    LOG.info("Setting ADDRESS {}", address);
    conf.set(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, address);
  }

  /**
   * Fetches the address for services to use when connecting to the
   * namenode. If the service RPC address is not configured, this returns
   * null when {@code fallback} is false, and the default namenode address
   * (shared by clients and services) when {@code fallback} is true.
   * Services here are datanodes, the backup node, and any non-client
   * connection.
   */
  public static InetSocketAddress getServiceAddress(Configuration conf,
      boolean fallback) {
    String addr = conf.getTrimmed(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY);
    if (addr == null || addr.isEmpty()) {
      return fallback ? DFSUtilClient.getNNAddress(conf) : null;
    }
    return DFSUtilClient.getNNAddress(addr);
  }

  //
  // Common NameNode methods implementation for the active name-node role.
  //
  public NamenodeRole getRole() {
    return role;
  }

  boolean isRole(NamenodeRole that) {
    return role.equals(that);
  }

  public static String composeNotStartedMessage(NamenodeRole role) {
    return role + " still not started";
  }

  /**
   * Given a configuration, get the address of the lifeline RPC server.
   * If the lifeline RPC is not configured, returns null.
   *
   * @param conf configuration
   * @return address or null
   */
  InetSocketAddress getLifelineRpcServerAddress(Configuration conf) {
    String addr = getTrimmedOrNull(conf,
        DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY);
    if (addr == null) {
      return null;
    }
    return NetUtils.createSocketAddr(addr);
  }

  /**
   * Given a configuration, get the address of the service RPC server.
   * If the service RPC is not configured, returns null.
   */
  protected InetSocketAddress getServiceRpcServerAddress(Configuration conf) {
    return NameNode.getServiceAddress(conf, false);
  }

  protected InetSocketAddress getRpcServerAddress(Configuration conf) {
    return DFSUtilClient.getNNAddress(conf);
  }

  /**
   * Given a configuration, get the bind host of the lifeline RPC server.
   * If the bind host is not configured, returns null.
   *
   * @param conf configuration
   * @return bind host or null
   */
  String getLifelineRpcServerBindHost(Configuration conf) {
    return getTrimmedOrNull(conf, DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY);
  }

  /**
   * Given a configuration, get the bind host of the service RPC server.
   * If the bind host is not configured, returns null.
   */
  protected String getServiceRpcServerBindHost(Configuration conf) {
    return getTrimmedOrNull(conf, DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY);
  }

  /**
   * Given a configuration, get the bind host of the client RPC server.
   * If the bind host is not configured, returns null.
   */
  protected String getRpcServerBindHost(Configuration conf) {
    return getTrimmedOrNull(conf, DFS_NAMENODE_RPC_BIND_HOST_KEY);
  }

  /**
   * Gets a trimmed value from the configuration, or null if no value is
   * defined.
   *
   * @param conf configuration
   * @param key configuration key to get
   * @return trimmed value, or null if no value is defined
   */
  private static String getTrimmedOrNull(Configuration conf, String key) {
    String addr = conf.getTrimmed(key);
    if (addr == null || addr.isEmpty()) {
      return null;
    }
    return addr;
  }

  /**
   * Modifies the configuration to contain the lifeline RPC address setting.
* * @param conf configuration to modify * @param lifelineRPCAddress lifeline RPC address */ void setRpcLifelineServerAddress(Configuration conf, InetSocketAddress lifelineRPCAddress) { LOG.info("Setting lifeline RPC address {}", lifelineRPCAddress); conf.set(DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY, NetUtils.getHostPortString(lifelineRPCAddress)); } /** * Modifies the configuration passed to contain the service rpc address setting */ protected void setRpcServiceServerAddress(Configuration conf, InetSocketAddress serviceRPCAddress) { setServiceAddress(conf, NetUtils.getHostPortString(serviceRPCAddress)); } protected void setRpcServerAddress(Configuration conf, InetSocketAddress rpcAddress) { FileSystem.setDefaultUri(conf, DFSUtilClient.getNNUri(rpcAddress)); } protected InetSocketAddress getHttpServerAddress(Configuration conf) { return getHttpAddress(conf); } /** * HTTP server address for binding the endpoint. This method is * for use by the NameNode and its derivatives. It may return * a different address than the one that should be used by clients to * connect to the NameNode. See * {@link DFSConfigKeys#DFS_NAMENODE_HTTP_BIND_HOST_KEY} * * @param conf * @return */ protected InetSocketAddress getHttpServerBindAddress(Configuration conf) { InetSocketAddress bindAddress = getHttpServerAddress(conf); // If DFS_NAMENODE_HTTP_BIND_HOST_KEY exists then it overrides the // host name portion of DFS_NAMENODE_HTTP_ADDRESS_KEY. final String bindHost = conf.getTrimmed(DFS_NAMENODE_HTTP_BIND_HOST_KEY); if (bindHost != null && !bindHost.isEmpty()) { bindAddress = new InetSocketAddress(bindHost, bindAddress.getPort()); } return bindAddress; } /** @return the NameNode HTTP address. */ public static InetSocketAddress getHttpAddress(Configuration conf) { return NetUtils.createSocketAddr( conf.getTrimmed(DFS_NAMENODE_HTTP_ADDRESS_KEY, DFS_NAMENODE_HTTP_ADDRESS_DEFAULT)); } protected void loadNamesystem(Configuration conf) throws IOException { this.namesystem = FSNamesystem.loadFromDisk(conf); } NamenodeRegistration getRegistration() { return nodeRegistration; } NamenodeRegistration setRegistration() { nodeRegistration = new NamenodeRegistration( NetUtils.getHostPortString(getNameNodeAddress()), NetUtils.getHostPortString(getHttpAddress()), getFSImage().getStorage(), getRole()); return nodeRegistration; } /* optimize ugi lookup for RPC operations to avoid a trip through * UGI.getCurrentUser which is synch'ed */ public static UserGroupInformation getRemoteUser() throws IOException { UserGroupInformation ugi = Server.getRemoteUser(); return (ugi != null) ? ugi : UserGroupInformation.getCurrentUser(); } @Override public void verifyToken(DelegationTokenIdentifier id, byte[] password) throws IOException { // during startup namesystem is null, let client retry if (namesystem == null) { throw new RetriableException("Namenode is in startup mode"); } namesystem.verifyToken(id, password); } /** * Login as the configured user for the NameNode. */ void loginAsNameNodeUser(Configuration conf) throws IOException { InetSocketAddress socAddr = getRpcServerAddress(conf); SecurityUtil.login(conf, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); } /** * Initialize name-node. 
* * @param conf the configuration */ protected void initialize(Configuration conf) throws IOException { if (conf.get(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS) == null) { String intervals = conf.get(DFS_METRICS_PERCENTILES_INTERVALS_KEY); if (intervals != null) { conf.set(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS, intervals); } } UserGroupInformation.setConfiguration(conf); loginAsNameNodeUser(conf); NameNode.initMetrics(conf, this.getRole()); StartupProgressMetrics.register(startupProgress); pauseMonitor = new JvmPauseMonitor(); pauseMonitor.init(conf); pauseMonitor.start(); metrics.getJvmMetrics().setPauseMonitor(pauseMonitor); if (NamenodeRole.NAMENODE == role) { startHttpServer(conf); } loadNamesystem(conf); startAliasMapServerIfNecessary(conf); rpcServer = createRpcServer(conf); initReconfigurableBackoffKey(); if (clientNamenodeAddress == null) { // This is expected for MiniDFSCluster. Set it now using // the RPC server's bind address. clientNamenodeAddress = NetUtils.getHostPortString(getNameNodeAddress()); LOG.info("Clients are to use " + clientNamenodeAddress + " to access" + " this namenode/service."); } if (NamenodeRole.NAMENODE == role) { httpServer.setNameNodeAddress(getNameNodeAddress()); httpServer.setFSImage(getFSImage()); if (levelDBAliasMapServer != null) { httpServer.setAliasMap(levelDBAliasMapServer.getAliasMap()); } } startCommonServices(conf); startMetricsLogger(conf); } @VisibleForTesting public InMemoryLevelDBAliasMapServer getAliasMapServer() { return levelDBAliasMapServer; } private void startAliasMapServerIfNecessary(Configuration conf) throws IOException { if (conf.getBoolean(DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED, DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED_DEFAULT) && conf.getBoolean(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED, DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED_DEFAULT)) { levelDBAliasMapServer = new InMemoryLevelDBAliasMapServer( InMemoryAliasMap::init, namesystem.getBlockPoolId()); levelDBAliasMapServer.setConf(conf); levelDBAliasMapServer.start(); } } private void initReconfigurableBackoffKey() { ipcClientRPCBackoffEnable = buildBackoffEnableKey(rpcServer .getClientRpcServer().getPort()); reconfigurableProperties.add(ipcClientRPCBackoffEnable); } static String buildBackoffEnableKey(final int port) { // format used to construct backoff enable key, e.g. ipc.8020.backoff.enable String format = "%s.%d.%s"; return String.format(format, IPC_NAMESPACE, port, IPC_BACKOFF_ENABLE); } /** * Start a timer to periodically write NameNode metrics to the log * file. This behavior can be disabled by configuration. * @param conf */ protected void startMetricsLogger(Configuration conf) { long metricsLoggerPeriodSec = conf.getInt(DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_KEY, DFS_NAMENODE_METRICS_LOGGER_PERIOD_SECONDS_DEFAULT); if (metricsLoggerPeriodSec <= 0) { return; } MetricsLoggerTask.makeMetricsLoggerAsync(MetricsLog); // Schedule the periodic logging. metricsLoggerTimer = new ScheduledThreadPoolExecutor(1); metricsLoggerTimer.setExecuteExistingDelayedTasksAfterShutdownPolicy( false); metricsLoggerTimer.scheduleWithFixedDelay(new MetricsLoggerTask(MetricsLog, "NameNode", (short) 128), metricsLoggerPeriodSec, metricsLoggerPeriodSec, TimeUnit.SECONDS); } protected void stopMetricsLogger() { if (metricsLoggerTimer != null) { metricsLoggerTimer.shutdown(); metricsLoggerTimer = null; } } /** * Create the RPC server implementation. Used as an extension point for the * BackupNode. 
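   * <p>A subclass can return a specialized server here; the BackupNode, for
   * instance, supplies an RPC server that also handles its journal traffic.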
*/ protected NameNodeRpcServer createRpcServer(Configuration conf) throws IOException { return new NameNodeRpcServer(conf, this); } /** Start the services common to active and standby states */ private void startCommonServices(Configuration conf) throws IOException { namesystem.startCommonServices(conf, haContext); registerNNSMXBean(); if (NamenodeRole.NAMENODE != role) { startHttpServer(conf); httpServer.setNameNodeAddress(getNameNodeAddress()); httpServer.setFSImage(getFSImage()); if (levelDBAliasMapServer != null) { httpServer.setAliasMap(levelDBAliasMapServer.getAliasMap()); } } rpcServer.start(); try { plugins = conf.getInstances(DFS_NAMENODE_PLUGINS_KEY, ServicePlugin.class); } catch (RuntimeException e) { String pluginsValue = conf.get(DFS_NAMENODE_PLUGINS_KEY); LOG.error("Unable to load NameNode plugins. Specified list of plugins: " + pluginsValue, e); throw e; } for (ServicePlugin p: plugins) { try { p.start(this); } catch (Throwable t) { LOG.warn("ServicePlugin " + p + " could not be started", t); } } LOG.info(getRole() + " RPC up at: " + getNameNodeAddress()); if (rpcServer.getServiceRpcAddress() != null) { LOG.info(getRole() + " service RPC up at: " + rpcServer.getServiceRpcAddress()); } } private void stopCommonServices() { if(rpcServer != null) rpcServer.stop(); if(namesystem != null) namesystem.close(); if (pauseMonitor != null) pauseMonitor.stop(); if (plugins != null) { for (ServicePlugin p : plugins) { try { p.stop(); } catch (Throwable t) { LOG.warn("ServicePlugin " + p + " could not be stopped", t); } } } stopHttpServer(); } private void startTrashEmptier(final Configuration conf) throws IOException { long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT); if (trashInterval == 0) { return; } else if (trashInterval < 0) { throw new IOException("Cannot start trash emptier with negative interval." + " Set " + FS_TRASH_INTERVAL_KEY + " to a positive value."); } // This may be called from the transitionToActive code path, in which // case the current user is the administrator, not the NN. The trash // emptier needs to run as the NN. See HDFS-3972. FileSystem fs = SecurityUtil.doAsLoginUser( new PrivilegedExceptionAction<FileSystem>() { @Override public FileSystem run() throws IOException { return FileSystem.get(conf); } }); this.emptier = new Thread(new Trash(fs, conf).getEmptier(), "Trash Emptier"); this.emptier.setDaemon(true); this.emptier.start(); } private void stopTrashEmptier() { if (this.emptier != null) { emptier.interrupt(); emptier = null; } } private void startHttpServer(final Configuration conf) throws IOException { httpServer = new NameNodeHttpServer(conf, this, getHttpServerBindAddress(conf)); httpServer.start(); httpServer.setStartupProgress(startupProgress); } private void stopHttpServer() { try { if (httpServer != null) httpServer.stop(); } catch (Exception e) { LOG.error("Exception while stopping httpserver", e); } } /** * Start NameNode. 
   * <p>
   * The name-node can be started with one of the following startup options:
   * <ul>
   * <li>{@link StartupOption#REGULAR REGULAR} - normal name node startup</li>
   * <li>{@link StartupOption#FORMAT FORMAT} - format name node</li>
   * <li>{@link StartupOption#BACKUP BACKUP} - start backup node</li>
   * <li>{@link StartupOption#CHECKPOINT CHECKPOINT} - start checkpoint
   * node</li>
   * <li>{@link StartupOption#UPGRADE UPGRADE} - start the cluster
   * upgrade and create a snapshot of the current file system state</li>
   * <li>{@link StartupOption#UPGRADEONLY UPGRADEONLY} - upgrade the cluster
   * metadata and then shut down without serving requests</li>
   * <li>{@link StartupOption#RECOVER RECOVERY} - recover name node
   * metadata</li>
   * <li>{@link StartupOption#ROLLBACK ROLLBACK} - roll the
   * cluster back to the previous state</li>
   * <li>{@link StartupOption#IMPORT IMPORT} - import checkpoint</li>
   * </ul>
   * The option is passed via configuration field:
   * <tt>dfs.namenode.startup</tt>
   *
   * The conf will be modified to reflect the actual ports on which
   * the NameNode is up and running if the user passes the port as
   * <code>zero</code> in the conf.
   *
   * @param conf configuration
   * @throws IOException
   */
  public NameNode(Configuration conf) throws IOException {
    this(conf, NamenodeRole.NAMENODE);
  }

  protected NameNode(Configuration conf, NamenodeRole role)
      throws IOException {
    super(conf);
    this.tracer = new Tracer.Builder("NameNode")
        .conf(TraceUtils.wrapHadoopConf(NAMENODE_HTRACE_PREFIX, conf))
        .build();
    this.tracerConfigurationManager =
        new TracerConfigurationManager(NAMENODE_HTRACE_PREFIX, conf);
    this.role = role;
    String nsId = getNameServiceId(conf);
    String namenodeId = HAUtil.getNameNodeId(conf, nsId);
    clientNamenodeAddress = NameNodeUtils.getClientNamenodeAddress(
        conf, nsId);

    if (clientNamenodeAddress != null) {
      LOG.info("Clients should use {} to access"
          + " this namenode/service.", clientNamenodeAddress);
    }
    this.haEnabled = HAUtil.isHAEnabled(conf, nsId);
    state = createHAState(getStartupOption(conf));
    this.allowStaleStandbyReads = HAUtil.shouldAllowStandbyReads(conf);
    this.haContext = createHAContext();
    try {
      initializeGenericKeys(conf, nsId, namenodeId);
      initialize(getConf());
      try {
        haContext.writeLock();
        state.prepareToEnterState(haContext);
        state.enterState(haContext);
      } finally {
        haContext.writeUnlock();
      }
    } catch (IOException e) {
      this.stopAtException(e);
      throw e;
    } catch (HadoopIllegalArgumentException e) {
      this.stopAtException(e);
      throw e;
    }
    notBecomeActiveInSafemode = conf.getBoolean(
        DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE,
        DFS_HA_NN_NOT_BECOME_ACTIVE_IN_SAFEMODE_DEFAULT);
    this.started.set(true);
  }

  private void stopAtException(Exception e) {
    try {
      this.stop();
    } catch (Exception ex) {
      LOG.warn("Encountered exception when handling exception ("
          + e.getMessage() + "):", ex);
    }
  }

  protected HAState createHAState(StartupOption startOpt) {
    if (!haEnabled || startOpt == StartupOption.UPGRADE
        || startOpt == StartupOption.UPGRADEONLY) {
      return ACTIVE_STATE;
    } else if (startOpt == StartupOption.OBSERVER) {
      return OBSERVER_STATE;
    } else {
      return STANDBY_STATE;
    }
  }

  protected HAContext createHAContext() {
    return new NameNodeHAContext();
  }

  /**
   * Wait for service to finish.
   * (Normally, it runs forever.)
   */
  public void join() {
    try {
      rpcServer.join();
    } catch (InterruptedException ie) {
      LOG.info("Caught interrupted exception ", ie);
    }
  }

  /**
   * Stop all NameNode threads and wait for all to finish.
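   * <p>Safe to call more than once: the first call sets
   * {@code stopRequested} and subsequent calls return immediately.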
   */
  public void stop() {
    synchronized (this) {
      if (stopRequested) {
        return;
      }
      stopRequested = true;
    }
    try {
      if (state != null) {
        state.exitState(haContext);
      }
    } catch (ServiceFailedException e) {
      LOG.warn("Encountered exception while exiting state ", e);
    } finally {
      stopMetricsLogger();
      stopCommonServices();
      if (metrics != null) {
        metrics.shutdown();
      }
      if (namesystem != null) {
        namesystem.shutdown();
      }
      if (nameNodeStatusBeanName != null) {
        MBeans.unregister(nameNodeStatusBeanName);
        nameNodeStatusBeanName = null;
      }
      if (levelDBAliasMapServer != null) {
        levelDBAliasMapServer.close();
      }
    }
    tracer.close();
  }

  synchronized boolean isStopRequested() {
    return stopRequested;
  }

  /**
   * Is the cluster currently in safe mode?
   */
  public boolean isInSafeMode() {
    return namesystem.isInSafeMode();
  }

  /** get FSImage */
  @VisibleForTesting
  public FSImage getFSImage() {
    return namesystem.getFSImage();
  }

  /**
   * @return NameNode RPC address
   */
  public InetSocketAddress getNameNodeAddress() {
    return rpcServer.getRpcAddress();
  }

  /**
   * @return The auxiliary nameNode RPC addresses, or empty set if there
   * is none.
   */
  public Set<InetSocketAddress> getAuxiliaryNameNodeAddresses() {
    return rpcServer.getAuxiliaryRpcAddresses();
  }

  /**
   * @return NameNode RPC address in "host:port" string form
   */
  public String getNameNodeAddressHostPortString() {
    return NetUtils.getHostPortString(getNameNodeAddress());
  }

  /**
   * Return a host:port format string that corresponds to an auxiliary
   * port configured on the NameNode. If there are multiple auxiliary ports,
   * an arbitrary one is returned. If there is no auxiliary listener, returns
   * null.
   *
   * @return a string of format host:port that points to an auxiliary NameNode
   *         address, or null if there is no such address.
   */
  @VisibleForTesting
  public String getNNAuxiliaryRpcAddress() {
    Set<InetSocketAddress> auxiliaryAddrs = getAuxiliaryNameNodeAddresses();
    if (auxiliaryAddrs.isEmpty()) {
      return null;
    }
    // Since the set has no particular order, returning the first element
    // from the iterator is effectively arbitrary.
    InetSocketAddress addr = auxiliaryAddrs.iterator().next();
    return NetUtils.getHostPortString(addr);
  }

  /**
   * @return NameNode service RPC address if configured, the
   * NameNode RPC address otherwise
   */
  public InetSocketAddress getServiceRpcAddress() {
    final InetSocketAddress serviceAddr = rpcServer.getServiceRpcAddress();
    return serviceAddr == null ? getNameNodeAddress() : serviceAddr;
  }

  /**
   * @return NameNode HTTP address, used by the Web UI, image transfer,
   * and HTTP-based file system clients like WebHDFS
   */
  public InetSocketAddress getHttpAddress() {
    return httpServer.getHttpAddress();
  }

  /**
   * @return NameNode HTTPS address, used by the Web UI, image transfer,
   * and HTTP-based file system clients like WebHDFS
   */
  public InetSocketAddress getHttpsAddress() {
    return httpServer.getHttpsAddress();
  }

  /**
   * Joins the NameNodeHttpServer; used by unit tests to ensure a full
   * shutdown, so that no bind exception is thrown during restart.
   */
  @VisibleForTesting
  public void joinHttpServer() {
    if (httpServer != null) {
      try {
        httpServer.join();
      } catch (InterruptedException e) {
        LOG.info("Caught InterruptedException joining NameNodeHttpServer", e);
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Verify that configured directories exist, then interactively confirm
   * that formatting is desired for each existing directory and format them.
* * @param conf configuration to use * @param force if true, format regardless of whether dirs exist * @return true if formatting was aborted, false otherwise * @throws IOException */ private static boolean format(Configuration conf, boolean force, boolean isInteractive) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); checkAllowFormat(conf); if (UserGroupInformation.isSecurityEnabled()) { InetSocketAddress socAddr = DFSUtilClient.getNNAddress(conf); SecurityUtil.login(conf, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); } Collection<URI> nameDirsToFormat = FSNamesystem.getNamespaceDirs(conf); List<URI> sharedDirs = FSNamesystem.getSharedEditsDirs(conf); List<URI> dirsToPrompt = new ArrayList<URI>(); dirsToPrompt.addAll(nameDirsToFormat); dirsToPrompt.addAll(sharedDirs); List<URI> editDirsToFormat = FSNamesystem.getNamespaceEditsDirs(conf); // if clusterID is not provided - see if you can find the current one String clusterId = StartupOption.FORMAT.getClusterId(); if(clusterId == null || clusterId.equals("")) { //Generate a new cluster id clusterId = NNStorage.newClusterID(); } System.out.println("Formatting using clusterid: " + clusterId); FSImage fsImage = new FSImage(conf, nameDirsToFormat, editDirsToFormat); try { FSNamesystem fsn = new FSNamesystem(conf, fsImage); fsImage.getEditLog().initJournalsForWrite(); // Abort NameNode format if reformat is disabled and if // meta-dir already exists if (conf.getBoolean(DFSConfigKeys.DFS_REFORMAT_DISABLED, DFSConfigKeys.DFS_REFORMAT_DISABLED_DEFAULT)) { force = false; isInteractive = false; for (StorageDirectory sd : fsImage.storage.dirIterable(null)) { if (sd.hasSomeData()) { throw new NameNodeFormatException( "NameNode format aborted as reformat is disabled for " + "this cluster."); } } } if (!fsImage.confirmFormat(force, isInteractive)) { return true; // aborted } fsImage.format(fsn, clusterId, force); } catch (IOException ioe) { LOG.warn("Encountered exception during format: ", ioe); fsImage.close(); throw ioe; } return false; } public static void checkAllowFormat(Configuration conf) throws IOException { if (!conf.getBoolean(DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY, DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_DEFAULT)) { throw new IOException("The option " + DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY + " is set to false for this filesystem, so it " + "cannot be formatted. You will need to set " + DFS_NAMENODE_SUPPORT_ALLOW_FORMAT_KEY +" parameter " + "to true in order to format this filesystem"); } } @VisibleForTesting public static boolean initializeSharedEdits(Configuration conf) throws IOException { return initializeSharedEdits(conf, true); } @VisibleForTesting public static boolean initializeSharedEdits(Configuration conf, boolean force) throws IOException { return initializeSharedEdits(conf, force, false); } /** * Clone the supplied configuration but remove the shared edits dirs. * * @param conf Supplies the original configuration. * @return Cloned configuration without the shared edit dirs. * @throws IOException on failure to generate the configuration. 
*/ private static Configuration getConfigurationWithoutSharedEdits( Configuration conf) throws IOException { List<URI> editsDirs = FSNamesystem.getNamespaceEditsDirs(conf, false); String editsDirsString = Joiner.on(",").join(editsDirs); Configuration confWithoutShared = new Configuration(conf); confWithoutShared.unset(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY); confWithoutShared.setStrings(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY, editsDirsString); return confWithoutShared; } /** * Format a new shared edits dir and copy in enough edit log segments so that * the standby NN can start up. * * @param conf configuration * @param force format regardless of whether or not the shared edits dir exists * @param interactive prompt the user when a dir exists * @return true if the command aborts, false otherwise */ private static boolean initializeSharedEdits(Configuration conf, boolean force, boolean interactive) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); if (conf.get(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY) == null) { LOG.error("No shared edits directory configured for namespace " + nsId + " namenode " + namenodeId); return false; } if (UserGroupInformation.isSecurityEnabled()) { InetSocketAddress socAddr = DFSUtilClient.getNNAddress(conf); SecurityUtil.login(conf, DFS_NAMENODE_KEYTAB_FILE_KEY, DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); } NNStorage existingStorage = null; FSImage sharedEditsImage = null; try { FSNamesystem fsns = FSNamesystem.loadFromDisk(getConfigurationWithoutSharedEdits(conf)); existingStorage = fsns.getFSImage().getStorage(); NamespaceInfo nsInfo = existingStorage.getNamespaceInfo(); List<URI> sharedEditsDirs = FSNamesystem.getSharedEditsDirs(conf); sharedEditsImage = new FSImage(conf, Lists.<URI>newArrayList(), sharedEditsDirs); sharedEditsImage.getEditLog().initJournalsForWrite(); if (!sharedEditsImage.confirmFormat(force, interactive)) { return true; // abort } NNStorage newSharedStorage = sharedEditsImage.getStorage(); // Call Storage.format instead of FSImage.format here, since we don't // actually want to save a checkpoint - just prime the dirs with // the existing namespace info newSharedStorage.format(nsInfo); sharedEditsImage.getEditLog().formatNonFileJournals(nsInfo, force); // Need to make sure the edit log segments are in good shape to initialize // the shared edits dir. fsns.getFSImage().getEditLog().close(); fsns.getFSImage().getEditLog().initJournalsForWrite(); fsns.getFSImage().getEditLog().recoverUnclosedStreams(); copyEditLogSegmentsToSharedDir(fsns, sharedEditsDirs, newSharedStorage, conf); } catch (IOException ioe) { LOG.error("Could not initialize shared edits dir", ioe); return true; // aborted } finally { if (sharedEditsImage != null) { try { sharedEditsImage.close(); } catch (IOException ioe) { LOG.warn("Could not close sharedEditsImage", ioe); } } // Have to unlock storage explicitly for the case when we're running in a // unit test, which runs in the same JVM as NNs. 
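      // (Presumably, without the unlock a second NN instance started in the
      // same test JVM would fail to lock these directories.)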
      if (existingStorage != null) {
        try {
          existingStorage.unlockAll();
        } catch (IOException ioe) {
          LOG.warn("Could not unlock storage directories", ioe);
          return true; // aborted
        }
      }
    }
    return false; // did not abort
  }

  private static void copyEditLogSegmentsToSharedDir(FSNamesystem fsns,
      Collection<URI> sharedEditsDirs, NNStorage newSharedStorage,
      Configuration conf) throws IOException {
    Preconditions.checkArgument(!sharedEditsDirs.isEmpty(),
        "No shared edits specified");
    // Copy edit log segments into the new shared edits dir.
    List<URI> sharedEditsUris = new ArrayList<URI>(sharedEditsDirs);
    FSEditLog newSharedEditLog = new FSEditLog(conf, newSharedStorage,
        sharedEditsUris);
    newSharedEditLog.initJournalsForWrite();
    newSharedEditLog.recoverUnclosedStreams();

    FSEditLog sourceEditLog = fsns.getFSImage().editLog;

    long fromTxId = fsns.getFSImage().getMostRecentCheckpointTxId();

    Collection<EditLogInputStream> streams = null;
    try {
      streams = sourceEditLog.selectInputStreams(fromTxId + 1, 0);

      // Set the nextTxid to the CheckpointTxId+1
      newSharedEditLog.setNextTxId(fromTxId + 1);

      // Copy all edits after last CheckpointTxId to shared edits dir
      for (EditLogInputStream stream : streams) {
        LOG.debug("Beginning to copy stream " + stream + " to shared edits");
        FSEditLogOp op;
        boolean segmentOpen = false;
        while ((op = stream.readOp()) != null) {
          if (LOG.isTraceEnabled()) {
            LOG.trace("copying op: " + op);
          }
          if (!segmentOpen) {
            newSharedEditLog.startLogSegment(op.txid, false,
                fsns.getEffectiveLayoutVersion());
            segmentOpen = true;
          }

          newSharedEditLog.logEdit(op);

          if (op.opCode == FSEditLogOpCodes.OP_END_LOG_SEGMENT) {
            newSharedEditLog.endCurrentLogSegment(false);
            LOG.debug("ending log segment because of END_LOG_SEGMENT op in "
                + stream);
            segmentOpen = false;
          }
        }

        if (segmentOpen) {
          LOG.debug("ending log segment because of end of stream in "
              + stream);
          newSharedEditLog.logSync();
          newSharedEditLog.endCurrentLogSegment(false);
          segmentOpen = false;
        }
      }
    } finally {
      if (streams != null) {
        FSEditLog.closeAllStreams(streams);
      }
    }
  }

  @VisibleForTesting
  public static boolean doRollback(Configuration conf,
      boolean isConfirmationNeeded) throws IOException {
    String nsId = DFSUtil.getNamenodeNameServiceId(conf);
    String namenodeId = HAUtil.getNameNodeId(conf, nsId);
    initializeGenericKeys(conf, nsId, namenodeId);

    FSNamesystem nsys = new FSNamesystem(conf, new FSImage(conf));
    System.err.print(
        "\"rollBack\" will remove the current state of the file system,\n"
        + "returning you to the state prior to initiating your recent\n"
        + "upgrade. This action is permanent and cannot be undone. If you\n"
        + "are performing a rollback in an HA environment, you should be\n"
        + "certain that no NameNode process is running on any host.");
    if (isConfirmationNeeded) {
      if (!confirmPrompt("Roll back file system state?")) {
        System.err.println("Rollback aborted.");
        return true;
      }
    }
    nsys.getFSImage().doRollback(nsys);
    return false;
  }

  private static void printUsage(PrintStream out) {
    out.println(USAGE + "\n");
  }

  @VisibleForTesting
  static StartupOption parseArguments(String[] args) {
    int argsLen = (args == null) ?
0 : args.length; StartupOption startOpt = StartupOption.REGULAR; for(int i=0; i < argsLen; i++) { String cmd = args[i]; if (StartupOption.FORMAT.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.FORMAT; for (i = i + 1; i < argsLen; i++) { if (args[i].equalsIgnoreCase(StartupOption.CLUSTERID.getName())) { i++; if (i >= argsLen) { // if no cluster id specified, return null LOG.error("Must specify a valid cluster ID after the " + StartupOption.CLUSTERID.getName() + " flag"); return null; } String clusterId = args[i]; // Make sure an id is specified and not another flag if (clusterId.isEmpty() || clusterId.equalsIgnoreCase(StartupOption.FORCE.getName()) || clusterId.equalsIgnoreCase( StartupOption.NONINTERACTIVE.getName())) { LOG.error("Must specify a valid cluster ID after the " + StartupOption.CLUSTERID.getName() + " flag"); return null; } startOpt.setClusterId(clusterId); } if (args[i].equalsIgnoreCase(StartupOption.FORCE.getName())) { startOpt.setForceFormat(true); } if (args[i].equalsIgnoreCase(StartupOption.NONINTERACTIVE.getName())) { startOpt.setInteractiveFormat(false); } } } else if (StartupOption.GENCLUSTERID.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.GENCLUSTERID; } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.REGULAR; } else if (StartupOption.BACKUP.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.BACKUP; } else if (StartupOption.CHECKPOINT.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.CHECKPOINT; } else if (StartupOption.OBSERVER.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.OBSERVER; } else if (StartupOption.UPGRADE.getName().equalsIgnoreCase(cmd) || StartupOption.UPGRADEONLY.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.UPGRADE.getName().equalsIgnoreCase(cmd) ? 
StartupOption.UPGRADE : StartupOption.UPGRADEONLY; /* Can be followed by CLUSTERID with a required parameter or * RENAMERESERVED with an optional parameter */ while (i + 1 < argsLen) { String flag = args[i + 1]; if (flag.equalsIgnoreCase(StartupOption.CLUSTERID.getName())) { if (i + 2 < argsLen) { i += 2; startOpt.setClusterId(args[i]); } else { LOG.error("Must specify a valid cluster ID after the " + StartupOption.CLUSTERID.getName() + " flag"); return null; } } else if (flag.equalsIgnoreCase(StartupOption.RENAMERESERVED .getName())) { if (i + 2 < argsLen) { FSImageFormat.setRenameReservedPairs(args[i + 2]); i += 2; } else { FSImageFormat.useDefaultRenameReservedPairs(); i += 1; } } else { LOG.error("Unknown upgrade flag " + flag); return null; } } } else if (StartupOption.ROLLINGUPGRADE.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.ROLLINGUPGRADE; ++i; if (i >= argsLen) { LOG.error("Must specify a rolling upgrade startup option " + RollingUpgradeStartupOption.getAllOptionString()); return null; } startOpt.setRollingUpgradeStartupOption(args[i]); } else if (StartupOption.ROLLBACK.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.ROLLBACK; } else if (StartupOption.IMPORT.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.IMPORT; } else if (StartupOption.BOOTSTRAPSTANDBY.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.BOOTSTRAPSTANDBY; return startOpt; } else if (StartupOption.INITIALIZESHAREDEDITS.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.INITIALIZESHAREDEDITS; for (i = i + 1 ; i < argsLen; i++) { if (StartupOption.NONINTERACTIVE.getName().equals(args[i])) { startOpt.setInteractiveFormat(false); } else if (StartupOption.FORCE.getName().equals(args[i])) { startOpt.setForceFormat(true); } else { LOG.error("Invalid argument: " + args[i]); return null; } } return startOpt; } else if (StartupOption.RECOVER.getName().equalsIgnoreCase(cmd)) { if (startOpt != StartupOption.REGULAR) { throw new RuntimeException("Can't combine -recover with " + "other startup options."); } startOpt = StartupOption.RECOVER; while (++i < argsLen) { if (args[i].equalsIgnoreCase( StartupOption.FORCE.getName())) { startOpt.setForce(MetaRecoveryContext.FORCE_FIRST_CHOICE); } else { throw new RuntimeException("Error parsing recovery options: " + "can't understand option \"" + args[i] + "\""); } } } else if (StartupOption.METADATAVERSION.getName().equalsIgnoreCase(cmd)) { startOpt = StartupOption.METADATAVERSION; } else { return null; } } return startOpt; } private static void setStartupOption(Configuration conf, StartupOption opt) { conf.set(DFS_NAMENODE_STARTUP_KEY, opt.name()); } public static StartupOption getStartupOption(Configuration conf) { return StartupOption.valueOf(conf.get(DFS_NAMENODE_STARTUP_KEY, StartupOption.REGULAR.toString())); } private static void doRecovery(StartupOption startOpt, Configuration conf) throws IOException { String nsId = DFSUtil.getNamenodeNameServiceId(conf); String namenodeId = HAUtil.getNameNodeId(conf, nsId); initializeGenericKeys(conf, nsId, namenodeId); if (startOpt.getForce() < MetaRecoveryContext.FORCE_ALL) { if (!confirmPrompt("You have selected Metadata Recovery mode. " + "This mode is intended to recover lost metadata on a corrupt " + "filesystem. Metadata recovery mode often permanently deletes " + "data from your HDFS filesystem. Please back up your edit log " + "and fsimage before trying this!\n\n" + "Are you ready to proceed? 
(Y/N)\n")) { System.err.println("Recovery aborted at user request.\n"); return; } } MetaRecoveryContext.LOG.info("starting recovery..."); UserGroupInformation.setConfiguration(conf); NameNode.initMetrics(conf, startOpt.toNodeRole()); FSNamesystem fsn = null; try { fsn = FSNamesystem.loadFromDisk(conf); fsn.getFSImage().saveNamespace(fsn); MetaRecoveryContext.LOG.info("RECOVERY COMPLETE"); } catch (IOException e) { MetaRecoveryContext.LOG.info("RECOVERY FAILED: caught exception", e); throw e; } catch (RuntimeException e) { MetaRecoveryContext.LOG.info("RECOVERY FAILED: caught exception", e); throw e; } finally { if (fsn != null) fsn.close(); } } /** * Verify that configured directories exist, then print the metadata versions * of the software and the image. * * @param conf configuration to use * @throws IOException */ private static boolean printMetadataVersion(Configuration conf) throws IOException { final String nsId = DFSUtil.getNamenodeNameServiceId(conf); final String namenodeId = HAUtil.getNameNodeId(conf, nsId); NameNode.initializeGenericKeys(conf, nsId, namenodeId); final FSImage fsImage = new FSImage(conf); final FSNamesystem fs = new FSNamesystem(conf, fsImage, false); return fsImage.recoverTransitionRead( StartupOption.METADATAVERSION, fs, null); } public static NameNode createNameNode(String argv[], Configuration conf) throws IOException { LOG.info("createNameNode " + Arrays.asList(argv)); if (conf == null) conf = new HdfsConfiguration(); // Parse out some generic args into Configuration. GenericOptionsParser hParser = new GenericOptionsParser(conf, argv); argv = hParser.getRemainingArgs(); // Parse the rest, NN specific args. StartupOption startOpt = parseArguments(argv); if (startOpt == null) { printUsage(System.err); return null; } setStartupOption(conf, startOpt); boolean aborted = false; switch (startOpt) { case FORMAT: aborted = format(conf, startOpt.getForceFormat(), startOpt.getInteractiveFormat()); terminate(aborted ? 1 : 0); return null; // avoid javac warning case GENCLUSTERID: System.err.println("Generating new cluster id:"); System.out.println(NNStorage.newClusterID()); terminate(0); return null; case ROLLBACK: aborted = doRollback(conf, true); terminate(aborted ? 1 : 0); return null; // avoid warning case BOOTSTRAPSTANDBY: String[] toolArgs = Arrays.copyOfRange(argv, 1, argv.length); int rc = BootstrapStandby.run(toolArgs, conf); terminate(rc); return null; // avoid warning case INITIALIZESHAREDEDITS: aborted = initializeSharedEdits(conf, startOpt.getForceFormat(), startOpt.getInteractiveFormat()); terminate(aborted ? 1 : 0); return null; // avoid warning case BACKUP: case CHECKPOINT: NamenodeRole role = startOpt.toNodeRole(); DefaultMetricsSystem.initialize(role.toString().replace(" ", "")); return new BackupNode(conf, role); case RECOVER: NameNode.doRecovery(startOpt, conf); return null; case METADATAVERSION: printMetadataVersion(conf); terminate(0); return null; // avoid javac warning case UPGRADEONLY: DefaultMetricsSystem.initialize("NameNode"); new NameNode(conf); terminate(0); return null; default: DefaultMetricsSystem.initialize("NameNode"); return new NameNode(conf); } } /** * In federation configuration is set for a set of * namenode and secondary namenode/backup/checkpointer, which are * grouped under a logical nameservice ID. The configuration keys specific * to them have suffix set to configured nameserviceId. * * This method copies the value from specific key of format key.nameserviceId * to key, to set up the generic configuration. 
Once this is done, only * the generic version of the configuration is read in the rest of the code, for * backward compatibility and simpler code changes. * * @param conf * Configuration object to lookup specific key and to set the value * to the key passed. Note the conf object is modified * @param nameserviceId name service Id (to distinguish federated NNs) * @param namenodeId the namenode ID (to distinguish HA NNs) * @see DFSUtil#setGenericConf(Configuration, String, String, String...) */ public static void initializeGenericKeys(Configuration conf, String nameserviceId, String namenodeId) { if ((nameserviceId != null && !nameserviceId.isEmpty()) || (namenodeId != null && !namenodeId.isEmpty())) { if (nameserviceId != null) { conf.set(DFS_NAMESERVICE_ID, nameserviceId); } if (namenodeId != null) { conf.set(DFS_HA_NAMENODE_ID_KEY, namenodeId); } DFSUtil.setGenericConf(conf, nameserviceId, namenodeId, NAMENODE_SPECIFIC_KEYS); DFSUtil.setGenericConf(conf, nameserviceId, null, NAMESERVICE_SPECIFIC_KEYS); } // If the RPC address is set use it to (re-)configure the default FS if (conf.get(DFS_NAMENODE_RPC_ADDRESS_KEY) != null) { URI defaultUri = URI.create(HdfsConstants.HDFS_URI_SCHEME + "://" + conf.get(DFS_NAMENODE_RPC_ADDRESS_KEY)); conf.set(FS_DEFAULT_NAME_KEY, defaultUri.toString()); if (LOG.isDebugEnabled()) { LOG.debug("Setting " + FS_DEFAULT_NAME_KEY + " to " + defaultUri.toString()); } } } /** * Get the name service Id for the node * @return name service Id or null if federation is not configured */ protected String getNameServiceId(Configuration conf) { return DFSUtil.getNamenodeNameServiceId(conf); } /** */ public static void main(String argv[]) throws Exception { if (DFSUtil.parseHelpArgument(argv, NameNode.USAGE, System.out, true)) { System.exit(0); } try { StringUtils.startupShutdownMessage(NameNode.class, argv, LOG); NameNode namenode = createNameNode(argv, null); if (namenode != null) { namenode.join(); } } catch (Throwable e) { LOG.error("Failed to start namenode.", e); terminate(1, e); } } synchronized void monitorHealth() throws HealthCheckFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { return; // no-op, if HA is not enabled } long start = Time.monotonicNow(); getNamesystem().checkAvailableResources(); long end = Time.monotonicNow(); if (end - start >= HEALTH_MONITOR_WARN_THRESHOLD_MS) { // log a warning if it takes >= 5 seconds.
LOG.warn("Remote IP {} checking available resources took {}ms", Server.getRemoteIp(), end - start); } if (!getNamesystem().nameNodeHasResourcesAvailable()) { throw new HealthCheckFailedException( "The NameNode has no resources available"); } if (notBecomeActiveInSafemode && isInSafeMode()) { throw new HealthCheckFailedException("The NameNode is configured to " + "report UNHEALTHY to ZKFC in Safemode."); } } synchronized void transitionToActive() throws ServiceFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } if (state == OBSERVER_STATE) { throw new ServiceFailedException( "Cannot transition from '" + OBSERVER_STATE + "' to '" + ACTIVE_STATE + "'"); } if (notBecomeActiveInSafemode && isInSafeMode()) { throw new ServiceFailedException(getRole() + " still not leave safemode"); } state.setState(haContext, ACTIVE_STATE); } synchronized void transitionToStandby() throws ServiceFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } state.setState(haContext, STANDBY_STATE); } synchronized void transitionToObserver() throws ServiceFailedException, AccessControlException { namesystem.checkSuperuserPrivilege(); if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } // Transition from ACTIVE to OBSERVER is forbidden. if (state == ACTIVE_STATE) { throw new ServiceFailedException( "Cannot transition from '" + ACTIVE_STATE + "' to '" + OBSERVER_STATE + "'"); } state.setState(haContext, OBSERVER_STATE); } synchronized HAServiceStatus getServiceStatus() throws ServiceFailedException, AccessControlException { if (!haEnabled) { throw new ServiceFailedException("HA for namenode is not enabled"); } if (state == null) { return new HAServiceStatus(HAServiceState.INITIALIZING); } HAServiceState retState = state.getServiceState(); HAServiceStatus ret = new HAServiceStatus(retState); if (retState == HAServiceState.STANDBY) { if (namesystem.isInSafeMode()) { ret.setNotReadyToBecomeActive("The NameNode is in safemode. 
" + namesystem.getSafeModeTip()); } else { ret.setReadyToBecomeActive(); } } else if (retState == HAServiceState.ACTIVE) { ret.setReadyToBecomeActive(); } else { ret.setNotReadyToBecomeActive("State is " + state); } return ret; } synchronized HAServiceState getServiceState() { if (state == null) { return HAServiceState.INITIALIZING; } return state.getServiceState(); } /** * Register NameNodeStatusMXBean */ private void registerNNSMXBean() { nameNodeStatusBeanName = MBeans.register("NameNode", "NameNodeStatus", this); } @Override // NameNodeStatusMXBean public String getNNRole() { String roleStr = ""; NamenodeRole role = getRole(); if (null != role) { roleStr = role.toString(); } return roleStr; } @Override // NameNodeStatusMXBean public String getState() { String servStateStr = ""; HAServiceState servState = getServiceState(); if (null != servState) { servStateStr = servState.toString(); } return servStateStr; } @Override // NameNodeStatusMXBean public String getHostAndPort() { return getNameNodeAddressHostPortString(); } @Override // NameNodeStatusMXBean public boolean isSecurityEnabled() { return UserGroupInformation.isSecurityEnabled(); } @Override // NameNodeStatusMXBean public long getLastHATransitionTime() { return state.getLastHATransitionTime(); } @Override //NameNodeStatusMXBean public long getBytesWithFutureGenerationStamps() { return getNamesystem().getBytesInFuture(); } @Override public String getSlowPeersReport() { return namesystem.getBlockManager().getDatanodeManager() .getSlowPeersReport(); } @Override //NameNodeStatusMXBean public String getSlowDisksReport() { return namesystem.getBlockManager().getDatanodeManager() .getSlowDisksReport(); } /** * Shutdown the NN immediately in an ungraceful way. Used when it would be * unsafe for the NN to continue operating, e.g. during a failed HA state * transition. * * @param t exception which warrants the shutdown. Printed to the NN log * before exit. * @throws ExitException thrown only for testing. */ protected synchronized void doImmediateShutdown(Throwable t) throws ExitException { String message = "Error encountered requiring NN shutdown. " + "Shutting down immediately."; try { LOG.error(message, t); } catch (Throwable ignored) { // This is unlikely to happen, but there's nothing we can do if it does. 
} terminate(1, t); } /** * Class used to expose {@link NameNode} as context to {@link HAState} */ protected class NameNodeHAContext implements HAContext { @Override public void setState(HAState s) { state = s; } @Override public HAState getState() { return state; } @Override public void startActiveServices() throws IOException { try { namesystem.startActiveServices(); startTrashEmptier(getConf()); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void stopActiveServices() throws IOException { try { if (namesystem != null) { namesystem.stopActiveServices(); } stopTrashEmptier(); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void startStandbyServices() throws IOException { try { namesystem.startStandbyServices(getConf(), state == NameNode.OBSERVER_STATE); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void prepareToStopStandbyServices() throws ServiceFailedException { try { namesystem.prepareToStopStandbyServices(); } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void stopStandbyServices() throws IOException { try { if (namesystem != null) { namesystem.stopStandbyServices(); } } catch (Throwable t) { doImmediateShutdown(t); } } @Override public void writeLock() { namesystem.writeLock(); namesystem.lockRetryCache(); } @Override public void writeUnlock() { namesystem.unlockRetryCache(); namesystem.writeUnlock(); } /** Check if an operation of given category is allowed */ @Override public void checkOperation(final OperationCategory op) throws StandbyException { state.checkOperation(haContext, op); } @Override public boolean allowStaleReads() { if (state == OBSERVER_STATE) { return true; } return allowStaleStandbyReads; } } public boolean isStandbyState() { return (state.equals(STANDBY_STATE)); } public boolean isActiveState() { return (state.equals(ACTIVE_STATE)); } public boolean isObserverState() { return state.equals(OBSERVER_STATE); } /** * Returns whether the NameNode is completely started */ boolean isStarted() { return this.started.get(); } /** * Check that a request to change this node's HA state is valid. * In particular, verifies that, if auto failover is enabled, non-forced * requests from the HAAdmin CLI are rejected, and vice versa. 
* * @param req the request to check * @throws AccessControlException if the request is disallowed */ void checkHaStateChange(StateChangeRequestInfo req) throws AccessControlException { boolean autoHaEnabled = getConf().getBoolean( DFS_HA_AUTO_FAILOVER_ENABLED_KEY, DFS_HA_AUTO_FAILOVER_ENABLED_DEFAULT); switch (req.getSource()) { case REQUEST_BY_USER: if (autoHaEnabled) { throw new AccessControlException( "Manual HA control for this NameNode is disallowed, because " + "automatic HA is enabled."); } break; case REQUEST_BY_USER_FORCED: if (autoHaEnabled) { LOG.warn("Allowing manual HA control from " + Server.getRemoteAddress() + " even though automatic HA is enabled, because the user " + "specified the force flag"); } break; case REQUEST_BY_ZKFC: if (!autoHaEnabled) { throw new AccessControlException( "Request from ZK failover controller at " + Server.getRemoteAddress() + " denied since automatic HA " + "is not enabled"); } break; } } /* * {@inheritDoc} * */ @Override // ReconfigurableBase public Collection<String> getReconfigurableProperties() { return reconfigurableProperties; } /* * {@inheritDoc} * */ @Override // ReconfigurableBase protected String reconfigurePropertyImpl(String property, String newVal) throws ReconfigurationException { final DatanodeManager datanodeManager = namesystem.getBlockManager() .getDatanodeManager(); if (property.equals(DFS_HEARTBEAT_INTERVAL_KEY)) { return reconfHeartbeatInterval(datanodeManager, property, newVal); } else if (property.equals(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY)) { return reconfHeartbeatRecheckInterval(datanodeManager, property, newVal); } else if (property.equals(FS_PROTECTED_DIRECTORIES)) { return reconfProtectedDirectories(newVal); } else if (property.equals(HADOOP_CALLER_CONTEXT_ENABLED_KEY)) { return reconfCallerContextEnabled(newVal); } else if (property.equals(ipcClientRPCBackoffEnable)) { return reconfigureIPCBackoffEnabled(newVal); } else if (property.equals(DFS_STORAGE_POLICY_SATISFIER_MODE_KEY)) { return reconfigureSPSModeEvent(newVal, property); } else if (property.equals(DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY) || property.equals(DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY) || property.equals( DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION)) { return reconfReplicationParameters(newVal, property); } else { throw new ReconfigurationException(property, newVal, getConf().get( property)); } } private String reconfReplicationParameters(final String newVal, final String property) throws ReconfigurationException { BlockManager bm = namesystem.getBlockManager(); int newSetting; namesystem.writeLock(); try { if (property.equals(DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY)) { bm.setMaxReplicationStreams( adjustNewVal(DFS_NAMENODE_REPLICATION_MAX_STREAMS_DEFAULT, newVal)); newSetting = bm.getMaxReplicationStreams(); } else if (property.equals( DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_KEY)) { bm.setReplicationStreamsHardLimit( adjustNewVal(DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_DEFAULT, newVal)); newSetting = bm.getReplicationStreamsHardLimit(); } else if ( property.equals( DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION)) { bm.setBlocksReplWorkMultiplier( adjustNewVal( DFS_NAMENODE_REPLICATION_WORK_MULTIPLIER_PER_ITERATION_DEFAULT, newVal)); newSetting = bm.getBlocksReplWorkMultiplier(); } else { throw new IllegalArgumentException("Unexpected property " + property + " in reconfReplicationParameters"); } LOG.info("RECONFIGURE* changed {} to {}", property, newSetting); return String.valueOf(newSetting); } catch
(IllegalArgumentException e) { throw new ReconfigurationException(property, newVal, getConf().get( property), e); } finally { namesystem.writeUnlock(); } } private int adjustNewVal(int defaultVal, String newVal) { if (newVal == null) { return defaultVal; } else { return Integer.parseInt(newVal); } } private String reconfHeartbeatInterval(final DatanodeManager datanodeManager, final String property, final String newVal) throws ReconfigurationException { namesystem.writeLock(); try { if (newVal == null) { // set to default datanodeManager.setHeartbeatInterval(DFS_HEARTBEAT_INTERVAL_DEFAULT); return String.valueOf(DFS_HEARTBEAT_INTERVAL_DEFAULT); } else { long newInterval = getConf() .getTimeDurationHelper(DFS_HEARTBEAT_INTERVAL_KEY, newVal, TimeUnit.SECONDS); datanodeManager.setHeartbeatInterval(newInterval); return String.valueOf(datanodeManager.getHeartbeatInterval()); } } catch (NumberFormatException nfe) { throw new ReconfigurationException(property, newVal, getConf().get( property), nfe); } finally { namesystem.writeUnlock(); LOG.info("RECONFIGURE* changed heartbeatInterval to " + datanodeManager.getHeartbeatInterval()); } } private String reconfHeartbeatRecheckInterval( final DatanodeManager datanodeManager, final String property, final String newVal) throws ReconfigurationException { namesystem.writeLock(); try { if (newVal == null) { // set to default datanodeManager.setHeartbeatRecheckInterval( DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT); return String.valueOf(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT); } else { datanodeManager.setHeartbeatRecheckInterval(Integer.parseInt(newVal)); return String.valueOf(datanodeManager.getHeartbeatRecheckInterval()); } } catch (NumberFormatException nfe) { throw new ReconfigurationException(property, newVal, getConf().get( property), nfe); } finally { namesystem.writeUnlock(); LOG.info("RECONFIGURE* changed heartbeatRecheckInterval to " + datanodeManager.getHeartbeatRecheckInterval()); } } private String reconfProtectedDirectories(String newVal) { return getNamesystem().getFSDirectory().setProtectedDirectories(newVal); } private String reconfCallerContextEnabled(String newVal) { Boolean callerContextEnabled; if (newVal == null) { callerContextEnabled = HADOOP_CALLER_CONTEXT_ENABLED_DEFAULT; } else { callerContextEnabled = Boolean.parseBoolean(newVal); } namesystem.setCallerContextEnabled(callerContextEnabled); return Boolean.toString(callerContextEnabled); } String reconfigureIPCBackoffEnabled(String newVal) { boolean clientBackoffEnabled; if (newVal == null) { clientBackoffEnabled = IPC_BACKOFF_ENABLE_DEFAULT; } else { clientBackoffEnabled = Boolean.parseBoolean(newVal); } rpcServer.getClientRpcServer() .setClientBackoffEnabled(clientBackoffEnabled); return Boolean.toString(clientBackoffEnabled); } String reconfigureSPSModeEvent(String newVal, String property) throws ReconfigurationException { if (newVal == null || StoragePolicySatisfierMode.fromString(newVal) == null) { throw new ReconfigurationException(property, newVal, getConf().get(property), new HadoopIllegalArgumentException( "For enabling or disabling storage policy satisfier, must " + "pass either internal/external/none string value only")); } if (!isActiveState()) { throw new ReconfigurationException(property, newVal, getConf().get(property), new HadoopIllegalArgumentException( "Enabling or disabling storage policy satisfier service on " + state + " NameNode is not allowed")); } StoragePolicySatisfierMode mode = StoragePolicySatisfierMode .fromString(newVal); if (mode == 
StoragePolicySatisfierMode.NONE) { // disabling sps service if (namesystem.getBlockManager().getSPSManager() != null) { namesystem.getBlockManager().getSPSManager().changeModeEvent(mode); namesystem.getBlockManager().disableSPS(); } } else { // enabling sps service boolean spsCreated = (namesystem.getBlockManager() .getSPSManager() != null); if (!spsCreated) { spsCreated = namesystem.getBlockManager().createSPSManager(getConf(), newVal); } if (spsCreated) { namesystem.getBlockManager().getSPSManager().changeModeEvent(mode); } } return newVal; } @Override // ReconfigurableBase protected Configuration getNewConf() { return new HdfsConfiguration(); } }
HDFS-14456: HAState#prepareToEnterState doesn't need a lock (#770). Contributed by hunshenshi.
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
HDFS-14456: HAState#prepareToEnterState doesn't need a lock (#770). Contributed by hunshenshi.
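The parseArguments method above threads a single index through nested flag scans and returns null on any malformed input. The following standalone sketch isolates that pattern; the StartupFlagSketch class, its flag names, and the Map-based result are hypothetical illustrations, not Hadoop's StartupOption API.

import java.util.HashMap;
import java.util.Map;

// Minimal sketch of the flag-parsing pattern in parseArguments: a primary
// option may be followed by sub-flags, some of which require a trailing value.
public class StartupFlagSketch {

  // Parses e.g. {"-format", "-clusterid", "CID-1", "-force"}.
  // Returns null on malformed input, mirroring parseArguments.
  static Map<String, String> parse(String[] args) {
    Map<String, String> opts = new HashMap<>();
    int argsLen = (args == null) ? 0 : args.length;
    for (int i = 0; i < argsLen; i++) {
      if ("-format".equalsIgnoreCase(args[i])) {
        opts.put("startOpt", "FORMAT");
        for (i = i + 1; i < argsLen; i++) {
          if ("-clusterid".equalsIgnoreCase(args[i])) {
            // A value must follow, and it must not itself be another flag.
            i++;
            if (i >= argsLen || args[i].startsWith("-")) {
              System.err.println("Must specify a valid cluster ID after -clusterid");
              return null;
            }
            opts.put("clusterId", args[i]);
          } else if ("-force".equalsIgnoreCase(args[i])) {
            opts.put("force", "true");
          } else if ("-nonInteractive".equalsIgnoreCase(args[i])) {
            opts.put("interactive", "false");
          }
        }
      } else {
        return null; // unknown primary option
      }
    }
    return opts;
  }

  public static void main(String[] args) {
    // Prints {force=true, clusterId=CID-1, startOpt=FORMAT}
    System.out.println(parse(new String[] {"-format", "-clusterid", "CID-1", "-force"}));
  }
}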
Java
apache-2.0
4501a21c136deb94aa7c14e7e37e8a56da96979a
0
randymay/centaur
package org.blaazinsoftware.centaur.service; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.lang3.StringUtils; import org.blaazinsoftware.centaur.CentaurException; import org.blaazinsoftware.centaur.annotation.AppEngineKind; import org.blaazinsoftware.centaur.annotation.AppEngineName; import java.beans.PropertyDescriptor; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; final class CentaurServiceUtils { protected static <T> Key getKey(T object) throws CentaurException { for (PropertyDescriptor descriptor : PropertyUtils.getPropertyDescriptors(object)) { if (Key.class.equals(descriptor.getPropertyType())) { Method readMethod = PropertyUtils.getReadMethod(descriptor); if (readMethod == null) { throw new CentaurException("No read property found for the type: " + Key.class.getSimpleName()); } else { try { return (Key) readMethod.invoke(object); } catch (Exception e) { throw new CentaurException(e); } } } } return null; } protected static <T> void setKey(T object, Key key) throws CentaurException { Method writeMethod = PropertyUtils.getWriteMethod(CentaurServiceUtils.getKeyPropertyDescriptor(object)); if (writeMethod == null) { throw new CentaurException("No write property found for the type: " + Key.class.getSimpleName()); } else { try { writeMethod.invoke(object, key); } catch (Exception e) { throw new CentaurException(e); } } } private static <T> PropertyDescriptor getKeyPropertyDescriptor(T object) throws CentaurException { return CentaurServiceUtils.getPropertyDescriptor(object, Key.class); } private static <T> PropertyDescriptor getPropertyDescriptor(T object, Class<?> klass) throws CentaurException { for (PropertyDescriptor descriptor : PropertyUtils.getPropertyDescriptors(object)) { if (klass.equals(descriptor.getPropertyType())) { return descriptor; } } return null; } protected static List<Field> getFieldsByAnnotation(Class<?> klass, Class<? extends Annotation> annotation) throws CentaurException { List<Field> fields = new ArrayList<>(); for (Field field : getAllFieldsInClassAndSuperClass(klass)) { // Using isAnnotationPresent method from Field class.
if (field.isAnnotationPresent(annotation)) { fields.add(field); } } return fields; } protected static List<Field> getAllFieldsInClassAndSuperClass(Class<?> klass) { List<Field> fields = new ArrayList<>(); final Field[] declaredFields = klass.getDeclaredFields(); fields.addAll(Arrays.asList(declaredFields)); final Class<?> superclass = klass.getSuperclass(); if (null != superclass) { fields.addAll(CentaurServiceUtils.getAllFieldsInClassAndSuperClass(superclass)); } return fields; } protected static <T> void initKey(T object) throws CentaurException { if (null == CentaurServiceUtils.getKey(object)) { Key key = createKey(object); CentaurServiceUtils.setKey(object, key); } } protected static <T> Key createKey(T object) throws CentaurException { String kind = CentaurServiceUtils.getKindValue(object); String name = CentaurServiceUtils.getNameValue(object); if (object != null && !StringUtils.isEmpty(kind) && !StringUtils.isEmpty(name)) { return KeyFactory.createKey(kind, name); } return null; } protected static <T, X> Key createKey(X parent, T object) throws CentaurException { String kind = CentaurServiceUtils.getKindValue(object); String name = CentaurServiceUtils.getNameValue(object); if (object != null && !StringUtils.isEmpty(kind) && !StringUtils.isEmpty(name)) { Entity parentEntity = new DefaultEntityTranslator().toEntity(parent); return KeyFactory.createKey(parentEntity.getKey(), kind, name); } return null; } protected static <T> String getKindValue(T object) throws CentaurException { try { Field kindField = CentaurServiceUtils.getSingleFieldByAnnotation(object, AppEngineKind.class); return CentaurServiceUtils.getStringValue(object, kindField); } catch (Exception e) { throw new CentaurException(e); } } protected static <T> String getNameValue(T object) throws CentaurException { try { Field kindField = CentaurServiceUtils.getSingleFieldByAnnotation(object, AppEngineName.class); return CentaurServiceUtils.getStringValue(object, kindField); } catch (Exception e) { throw new CentaurException(e); } } private static <T> Field getSingleFieldByAnnotation(T object, Class<? extends Annotation> annotation) throws CentaurException { return CentaurServiceUtils.getSingleFieldByAnnotation(object.getClass(), annotation); } private static Field getSingleFieldByAnnotation(Class<?> klass, Class<? extends Annotation> annotation) throws CentaurException { List<Field> fields = CentaurServiceUtils.getFieldsByAnnotation(klass, annotation); if (fields.size() == 0) { throw new IllegalArgumentException(annotation.getSimpleName() + " is missing"); } else if (fields.size() > 1) { throw new IllegalArgumentException("Only one field of type " + annotation.getSimpleName() + " is allowed"); } return fields.get(0); } private static <T> String getStringValue(T object, Field field) throws CentaurException { try { if (!String.class.equals(field.getType())) { throw new IllegalArgumentException("Field must be String"); } PropertyDescriptor descriptor = PropertyUtils.getPropertyDescriptor(object, field.getName()); Method method = descriptor.getReadMethod(); if (null == method) { throw new IllegalArgumentException("Field marked as " + field.getName() +" missing, or no Read Method found"); } Object value = method.invoke(object); if (null == value) { return null; } else { return value.toString(); } } catch (Exception e) { throw new CentaurException(e); } } }
src/main/java/org/blaazinsoftware/centaur/service/CentaurServiceUtils.java
package org.blaazinsoftware.centaur.service; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.lang3.StringUtils; import org.blaazinsoftware.centaur.CentaurException; import org.blaazinsoftware.centaur.annotation.AppEngineKind; import org.blaazinsoftware.centaur.annotation.AppEngineName; import java.beans.PropertyDescriptor; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; final class CentaurServiceUtils { protected static <T> Key getKey(T object) throws CentaurException { for (PropertyDescriptor descriptor : PropertyUtils.getPropertyDescriptors(object)) { if (Key.class.equals(descriptor.getPropertyType())) { Method readMethod = PropertyUtils.getReadMethod(descriptor); if (readMethod == null) { throw new CentaurException("No read property found for the type: " + Key.class.getSimpleName()); } else { try { return (Key) readMethod.invoke(object); } catch (Exception e) { throw new CentaurException(e); } } } } return null; } protected static <T> void setKey(T object, Key key) throws CentaurException { Method writeMethod = PropertyUtils.getWriteMethod(CentaurServiceUtils.getKeyPropertyDescriptor(object)); if (writeMethod == null) { throw new CentaurException("No write property found for the type: " + Key.class.getSimpleName()); } else { try { writeMethod.invoke(object, key); } catch (Exception e) { throw new CentaurException(e); } } } private static <T> PropertyDescriptor getKeyPropertyDescriptor(T object) throws CentaurException { return CentaurServiceUtils.getPropertyDescriptor(object, Key.class); } private static <T> PropertyDescriptor getPropertyDescriptor(T object, Class<?> klass) throws CentaurException { for (PropertyDescriptor descriptor : PropertyUtils.getPropertyDescriptors(object)) { if (klass.equals(descriptor.getPropertyType())) { return descriptor; } } return null; } protected static <T> List<Field> getFieldsByAnnotation(T object, Class<? extends Annotation> annotation) throws CentaurException { List<Field> fields = new ArrayList<>(); for (Field field : getAllFieldsInClassAndSuperClass(object.getClass())) { // Using isAnnotationPresent method from Field class.
if (field.isAnnotationPresent(annotation)) { fields.add(field); } } return fields; } protected static List<Field> getAllFieldsInClassAndSuperClass(Class<?> klass) { List<Field> fields = new ArrayList<>(); final Field[] declaredFields = klass.getDeclaredFields(); fields.addAll(Arrays.asList(declaredFields)); final Class<?> superclass = klass.getSuperclass(); if (null != superclass) { fields.addAll(CentaurServiceUtils.getAllFieldsInClassAndSuperClass(superclass)); } return fields; } protected static <T> void initKey(T object) throws CentaurException { if (null == CentaurServiceUtils.getKey(object)) { Key key = createKey(object); CentaurServiceUtils.setKey(object, key); } } protected static <T> Key createKey(T object) throws CentaurException { String kind = CentaurServiceUtils.getKindValue(object); String name = CentaurServiceUtils.getNameValue(object); if (object != null && !StringUtils.isEmpty(kind) && !StringUtils.isEmpty(name)) { return KeyFactory.createKey(kind, name); } return null; } protected static <T, X> Key createKey(X parent, T object) throws CentaurException { String kind = CentaurServiceUtils.getKindValue(object); String name = CentaurServiceUtils.getNameValue(object); if (object != null && !StringUtils.isEmpty(kind) && !StringUtils.isEmpty(name)) { Entity parentEntity = new DefaultEntityTranslator().toEntity(parent); return KeyFactory.createKey(parentEntity.getKey(), kind, name); } return null; } protected static <T> String getKindValue(T object) throws CentaurException { try { Field kindField = CentaurServiceUtils.getSingleFieldByAnnotation(object, AppEngineKind.class); return CentaurServiceUtils.getStringValue(object, kindField); } catch (Exception e) { throw new CentaurException(e); } } protected static <T> String getNameValue(T object) throws CentaurException { try { Field kindField = CentaurServiceUtils.getSingleFieldByAnnotation(object, AppEngineName.class); return CentaurServiceUtils.getStringValue(object, kindField); } catch (Exception e) { throw new CentaurException(e); } } private static <T> Field getSingleFieldByAnnotation(T object, Class<? extends Annotation> annotation) throws CentaurException { List<Field> fields = CentaurServiceUtils.getFieldsByAnnotation(object, annotation); if (fields.size() == 0) { throw new IllegalArgumentException(annotation.getSimpleName() + " is missing"); } else if (fields.size() > 1) { throw new IllegalArgumentException("Only one field of type " + annotation.getSimpleName() + " is allowed"); } return fields.get(0); } private static <T> String getStringValue(T object, Field field) throws CentaurException { try { if (!String.class.equals(field.getType())) { throw new IllegalArgumentException("Field must be String"); } PropertyDescriptor descriptor = PropertyUtils.getPropertyDescriptor(object, field.getName()); Method method = descriptor.getReadMethod(); if (null == method) { throw new IllegalArgumentException("Field marked as " + field.getName() +" missing, or no Read Method found"); } Object value = method.invoke(object); if (null == value) { return null; } else { return value.toString(); } } catch (Exception e) { throw new CentaurException(e); } } }
Refactor to use Class: getFieldsByAnnotation now takes a Class<?> directly instead of an object instance.
src/main/java/org/blaazinsoftware/centaur/service/CentaurServiceUtils.java
Refactor to use Class: getFieldsByAnnotation now takes a Class<?> directly instead of an object instance.
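The commit above changes getFieldsByAnnotation to operate on a Class<?> directly. The core technique, walking getSuperclass() until it returns null and filtering declared fields by a runtime-retained annotation, can be sketched in isolation; the Name annotation and the Base/Child classes below are hypothetical stand-ins for AppEngineName and real entity types.

import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Standalone sketch of the superclass-chain reflection pattern used by
// getAllFieldsInClassAndSuperClass/getFieldsByAnnotation above.
public class AnnotatedFieldSketch {

  // Must be RUNTIME-retained, or isAnnotationPresent will never see it.
  @Retention(RetentionPolicy.RUNTIME)
  @Target(ElementType.FIELD)
  @interface Name {}

  static class Base { @Name String name = "base"; }
  static class Child extends Base { String other = "ignored"; }

  // Collects declared fields of klass and every superclass, then filters by annotation.
  static List<Field> fieldsByAnnotation(Class<?> klass, Class<? extends Annotation> ann) {
    List<Field> all = new ArrayList<>();
    for (Class<?> c = klass; c != null; c = c.getSuperclass()) {
      all.addAll(Arrays.asList(c.getDeclaredFields()));
    }
    List<Field> matched = new ArrayList<>();
    for (Field f : all) {
      if (f.isAnnotationPresent(ann)) {
        matched.add(f);
      }
    }
    return matched;
  }

  public static void main(String[] args) {
    // Finds Base.name even though the lookup starts from Child.
    System.out.println(fieldsByAnnotation(Child.class, Name.class));
  }
}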
Java
apache-2.0
be6cf505d50bb18e9ed487429ee7eb20a60b5b09
0
grfeng/conductor,Netflix/conductor,Netflix/conductor,grfeng/conductor,Netflix/conductor,Netflix/conductor,Netflix/conductor,grfeng/conductor,grfeng/conductor,grfeng/conductor,grfeng/conductor
package com.netflix.conductor.grpc.server; import com.google.inject.Inject; import com.netflix.conductor.core.config.Configuration; import io.grpc.BindableService; import io.grpc.Server; import io.grpc.ServerBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Singleton; import java.io.IOException; import java.util.Arrays; @Singleton public class GRPCServer { private static final Logger logger = LoggerFactory.getLogger(GRPCServer.class); private final Server server; public final static String CONFIG_PORT = "grpc.port"; public final static int CONFIG_PORT_DEFAULT = 8080; @Inject public GRPCServer(Configuration conf, BindableService... services) { final int port = conf.getIntProperty(CONFIG_PORT, CONFIG_PORT_DEFAULT); ServerBuilder<?> builder = ServerBuilder.forPort(port); Arrays.stream(services).forEach(builder::addService); server = builder.build(); } public void start() throws IOException { server.start(); logger.info("grpc: Server started, listening on " + server.getPort()); } public void stop() { if (server != null) { logger.info("grpc: server shutting down"); server.shutdown(); } } }
grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java
package com.netflix.conductor.grpc.server; import com.google.inject.Inject; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; import io.grpc.BindableService; import io.grpc.Server; import io.grpc.ServerBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Singleton; import java.io.IOException; @Singleton public class GRPCServer { private static final Logger logger = LoggerFactory.getLogger(GRPCServer.class); private final Server server; public final static String CONFIG_PORT = "grpc.port"; public final static int CONFIG_PORT_DEFAULT = 8080; @Inject public GRPCServer(Configuration conf, BindableService... services) { final int port = conf.getIntProperty(CONFIG_PORT, CONFIG_PORT_DEFAULT); ServerBuilder<?> builder = ServerBuilder.forPort(port); for (BindableService s : services) { builder.addService(s); } server = builder.build(); } public void start() throws IOException { server.start(); logger.info("grpc: Server started, listening on " + server.getPort()); } public void stop() { if (server != null) { logger.info("grpc: server shutting down"); server.shutdown(); } } }
grpc-server: Make more functional: register services via Arrays.stream(services).forEach(builder::addService) rather than an explicit loop
grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java
grpc-server: Make more functional: register services via Arrays.stream(services).forEach(builder::addService) rather than an explicit loop
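The new GRPCServer constructor above replaces an explicit for-loop with Arrays.stream(services).forEach(builder::addService). A minimal sketch of that substitution follows, using a hypothetical ServiceBuilder in place of io.grpc.ServerBuilder so it runs without the gRPC dependency.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Sketch of the refactor: the two registration styles produce the same result,
// but the stream form hands each varargs element straight to a method reference.
public class StreamRegistrationSketch {

  // Hypothetical stand-in for io.grpc.ServerBuilder's fluent addService API.
  static class ServiceBuilder {
    final List<String> services = new ArrayList<>();
    ServiceBuilder addService(String s) { services.add(s); return this; }
  }

  public static void main(String[] args) {
    String[] services = {"TaskService", "WorkflowService"};

    // Imperative form, as in the old GRPCServer constructor:
    ServiceBuilder a = new ServiceBuilder();
    for (String s : services) {
      a.addService(s);
    }

    // Functional form, as in the new constructor; the method reference
    // forwards each array element to the builder:
    ServiceBuilder b = new ServiceBuilder();
    Arrays.stream(services).forEach(b::addService);

    System.out.println(a.services.equals(b.services)); // true
  }
}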
Java
apache-2.0
653ba73516e2c27415bd07c25caedc3f60929660
0
aledsage/legacy-brooklyn,andreaturli/legacy-brooklyn,andreaturli/legacy-brooklyn,bmwshop/brooklyn,neykov/incubator-brooklyn,andreaturli/legacy-brooklyn,bmwshop/brooklyn,aledsage/legacy-brooklyn,andreaturli/legacy-brooklyn,bmwshop/brooklyn,bmwshop/brooklyn,andreaturli/legacy-brooklyn,bmwshop/brooklyn,andreaturli/legacy-brooklyn,aledsage/legacy-brooklyn,neykov/incubator-brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn,aledsage/legacy-brooklyn,neykov/incubator-brooklyn,neykov/incubator-brooklyn,bmwshop/brooklyn,andreaturli/legacy-brooklyn,bmwshop/brooklyn,aledsage/legacy-brooklyn,neykov/incubator-brooklyn,aledsage/legacy-brooklyn
package brooklyn.util.exceptions; import static com.google.common.base.Predicates.instanceOf; import static com.google.common.base.Throwables.getCausalChain; import static com.google.common.collect.Iterables.find; import java.util.NoSuchElementException; import com.google.common.base.Throwables; public class Exceptions { /** * Propagate a {@link Throwable} as a {@link RuntimeException}. * <p> * Like Guava {@link Throwables#propagate(Throwable)} but throws {@link RuntimeInterruptedException} * to handle {@link InterruptedException}s. */ public static RuntimeException propagate(Throwable throwable) { if (throwable instanceof InterruptedException) throw new RuntimeInterruptedException((InterruptedException) throwable); return Throwables.propagate(throwable); } /** * Propagate exceptions which are fatal. * <p> * Propagates only those exceptions which one rarely (if ever) wants to capture, * such as {@link InterruptedException} and {@link Error}s. */ public static void propagateIfFatal(Throwable throwable) { if (throwable instanceof InterruptedException) throw new RuntimeInterruptedException((InterruptedException) throwable); if (throwable instanceof Error) throw (Error) throwable; } // based on jclouds Throwables2 (with guice removed) @SuppressWarnings("unchecked") public static <T extends Throwable> T getFirstThrowableOfType(Throwable from, Class<T> clazz) { try { return (T) find(getCausalChain(from), instanceOf(clazz)); } catch (NoSuchElementException e) { return null; } } }
core/src/main/java/brooklyn/util/exceptions/Exceptions.java
package brooklyn.util.exceptions; import static com.google.common.base.Predicates.instanceOf; import static com.google.common.base.Throwables.getCausalChain; import static com.google.common.collect.Iterables.find; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; import com.google.common.base.Throwables; public class Exceptions { /** * Propagate a {@link Throwable} as a {@link RuntimeException}. * <p> * Like Guava {@link Throwables#propagate(Throwable)} but throws {@link RuntimeInterruptedException} * to handle {@link InterruptedException}s and unpacks the {@link Exception#getCause() cause} and propagates * it for {@link ExecutionException}s. */ public static RuntimeException propagate(Throwable throwable) { if (throwable instanceof InterruptedException) throw new RuntimeInterruptedException((InterruptedException) throwable); if (throwable instanceof ExecutionException) return Throwables.propagate(throwable.getCause()); return Throwables.propagate(throwable); } /** * Propagate exceptions which are fatal. * <p> * Propagates only those exceptions which one rarely (if ever) wants to capture, * such as {@link InterruptedException} and {@link Error}s. */ public static void propagateIfFatal(Throwable throwable) { if (throwable instanceof InterruptedException) throw new RuntimeInterruptedException((InterruptedException) throwable); if (throwable instanceof ExecutionException) propagateIfFatal(throwable.getCause()); if (throwable instanceof Error) throw (Error) throwable; } // based on jclouds Throwables2 (with guice removed) @SuppressWarnings("unchecked") public static <T extends Throwable> T getFirstThrowableOfType(Throwable from, Class<T> clazz) { try { return (T) find(getCausalChain(from), instanceOf(clazz)); } catch (NoSuchElementException e) { return null; } } }
Do not unpack ExecutionException when propagating Throwable
core/src/main/java/brooklyn/util/exceptions/Exceptions.java
Do not unpack ExecutionException when propagating Throwable
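The commit above removes the ExecutionException unwrapping from Exceptions.propagate. What that unwrapping did can be shown with plain JDK concurrency: a Future wraps the task's failure in an ExecutionException, so the task's own exception is only reachable through getCause(). The UnwrapSketch class below is a hypothetical illustration, not Brooklyn code.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Demonstrates why propagate() previously forwarded getCause() for
// ExecutionException: the wrapper itself carries no useful information.
public class UnwrapSketch {
  public static void main(String[] args) throws InterruptedException {
    ExecutorService pool = Executors.newSingleThreadExecutor();
    Callable<Void> failing = () -> { throw new IllegalStateException("task failed"); };
    Future<Void> future = pool.submit(failing);
    try {
      future.get();
    } catch (ExecutionException e) {
      // e is just the wrapper; the old propagate() re-threw e.getCause() instead.
      System.out.println(e.getCause()); // java.lang.IllegalStateException: task failed
    } finally {
      pool.shutdown();
    }
  }
}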
Java
apache-2.0
bf9e32d59d65afa6425f03e488959c8883038801
0
apurtell/hbase,mahak/hbase,joshelser/hbase,apurtell/hbase,ndimiduk/hbase,vincentpoon/hbase,JingchengDu/hbase,vincentpoon/hbase,Guavus/hbase,narendragoyal/hbase,andrewmains12/hbase,gustavoanatoly/hbase,JingchengDu/hbase,ndimiduk/hbase,joshelser/hbase,lshmouse/hbase,gustavoanatoly/hbase,narendragoyal/hbase,bijugs/hbase,ultratendency/hbase,francisliu/hbase,narendragoyal/hbase,gustavoanatoly/hbase,vincentpoon/hbase,joshelser/hbase,andrewmains12/hbase,Apache9/hbase,andrewmains12/hbase,apurtell/hbase,ndimiduk/hbase,andrewmains12/hbase,Guavus/hbase,ndimiduk/hbase,bijugs/hbase,HubSpot/hbase,Guavus/hbase,Apache9/hbase,ndimiduk/hbase,Apache9/hbase,ChinmaySKulkarni/hbase,Guavus/hbase,vincentpoon/hbase,gustavoanatoly/hbase,Eshcar/hbase,apurtell/hbase,Guavus/hbase,bijugs/hbase,apurtell/hbase,Guavus/hbase,andrewmains12/hbase,ChinmaySKulkarni/hbase,apurtell/hbase,vincentpoon/hbase,SeekerResource/hbase,lshmouse/hbase,Guavus/hbase,Guavus/hbase,Guavus/hbase,lshmouse/hbase,Apache9/hbase,joshelser/hbase,narendragoyal/hbase,Apache9/hbase,Apache9/hbase,vincentpoon/hbase,andrewmains12/hbase,francisliu/hbase,HubSpot/hbase,ndimiduk/hbase,juwi/hbase,joshelser/hbase,apurtell/hbase,ChinmaySKulkarni/hbase,ultratendency/hbase,bijugs/hbase,apurtell/hbase,narendragoyal/hbase,gustavoanatoly/hbase,francisliu/hbase,SeekerResource/hbase,juwi/hbase,juwi/hbase,SeekerResource/hbase,vincentpoon/hbase,Eshcar/hbase,lshmouse/hbase,joshelser/hbase,apurtell/hbase,ndimiduk/hbase,HubSpot/hbase,bijugs/hbase,mahak/hbase,andrewmains12/hbase,mahak/hbase,SeekerResource/hbase,ndimiduk/hbase,francisliu/hbase,HubSpot/hbase,HubSpot/hbase,francisliu/hbase,francisliu/hbase,joshelser/hbase,ultratendency/hbase,ultratendency/hbase,HubSpot/hbase,mahak/hbase,JingchengDu/hbase,bijugs/hbase,HubSpot/hbase,SeekerResource/hbase,andrewmains12/hbase,ultratendency/hbase,ultratendency/hbase,lshmouse/hbase,vincentpoon/hbase,SeekerResource/hbase,lshmouse/hbase,Eshcar/hbase,JingchengDu/hbase,SeekerResource/hbase,narendragoyal/hbase,gustavoanatoly/hbase,SeekerResource/hbase,juwi/hbase,narendragoyal/hbase,lshmouse/hbase,HubSpot/hbase,Apache9/hbase,mahak/hbase,JingchengDu/hbase,gustavoanatoly/hbase,narendragoyal/hbase,Eshcar/hbase,mahak/hbase,joshelser/hbase,Guavus/hbase,lshmouse/hbase,Apache9/hbase,mahak/hbase,juwi/hbase,andrewmains12/hbase,lshmouse/hbase,Eshcar/hbase,JingchengDu/hbase,gustavoanatoly/hbase,juwi/hbase,bijugs/hbase,bijugs/hbase,joshelser/hbase,JingchengDu/hbase,bijugs/hbase,ChinmaySKulkarni/hbase,JingchengDu/hbase,bijugs/hbase,mahak/hbase,Eshcar/hbase,francisliu/hbase,Apache9/hbase,lshmouse/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,SeekerResource/hbase,HubSpot/hbase,ultratendency/hbase,ultratendency/hbase,ChinmaySKulkarni/hbase,ChinmaySKulkarni/hbase,Eshcar/hbase,Eshcar/hbase,francisliu/hbase,mahak/hbase,ndimiduk/hbase,ultratendency/hbase,apurtell/hbase,ChinmaySKulkarni/hbase,SeekerResource/hbase,juwi/hbase,narendragoyal/hbase,francisliu/hbase,francisliu/hbase,JingchengDu/hbase,joshelser/hbase,ultratendency/hbase,mahak/hbase,ChinmaySKulkarni/hbase,gustavoanatoly/hbase,vincentpoon/hbase,HubSpot/hbase,vincentpoon/hbase,andrewmains12/hbase,narendragoyal/hbase,ndimiduk/hbase,juwi/hbase,gustavoanatoly/hbase,Eshcar/hbase,ChinmaySKulkarni/hbase,Apache9/hbase,JingchengDu/hbase,juwi/hbase
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.mttr; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.math.stat.descriptive.DescriptiveStatistics; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.InvalidFamilyOperationException; import org.apache.hadoop.hbase.NamespaceExistException; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.testclassification.IntegrationTests; import org.apache.hadoop.hbase.chaos.actions.Action; import org.apache.hadoop.hbase.chaos.actions.MoveRegionsOfTableAction; import org.apache.hadoop.hbase.chaos.actions.RestartActiveMasterAction; import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingMetaAction; import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingTableAction; import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.RetriesExhaustedException; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.coprocessor.CoprocessorException; import org.apache.hadoop.hbase.filter.KeyOnlyFilter; import org.apache.hadoop.hbase.ipc.FatalConnectionException; import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException; import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.htrace.Span; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; import org.apache.htrace.impl.AlwaysSampler; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import com.google.common.base.Objects; /** * Integration test that should benchmark how fast HBase can recover from failures. 
This test starts * different threads: * <ol> * <li> * Load Test Tool.<br/> * This runs so that all RegionServers will have some load and WALs will be full. * </li> * <li> * Scan thread.<br/> * This thread runs a very short scan over and over again recording how long it takes to respond. * The longest response is assumed to be the time it took to recover. * </li> * <li> * Put thread.<br/> * This thread is just like the scan thread except it does a very small put. * </li> * <li> * Admin thread. <br/> * This thread will continually go to the master to try and get the cluster status. Just like the * put and scan threads, the time to respond is recorded. * </li> * <li> * Chaos Monkey thread.<br/> * This thread runs a ChaosMonkey.Action. * </li> * </ol> * <p/> * The ChaosMonkey actions currently run are: * <ul> * <li>Restart the RegionServer holding meta.</li> * <li>Move the Regions of meta.</li> * <li>Restart the RegionServer holding the table the scan and put threads are targeting.</li> * <li>Move the Regions of the table used by the scan and put threads.</li> * <li>Restart the master.</li> * </ul> * <p/> * At the end of the test a log line is output on the INFO level containing the timing data that was * collected. */ @Category(IntegrationTests.class) public class IntegrationTestMTTR { /** * Constants. */ private static final byte[] FAMILY = Bytes.toBytes("d"); private static final Log LOG = LogFactory.getLog(IntegrationTestMTTR.class); private static long sleepTime; private static final String SLEEP_TIME_KEY = "hbase.IntegrationTestMTTR.sleeptime"; private static final long SLEEP_TIME_DEFAULT = 60 * 1000l; /** * Configurable table names. */ private static TableName tableName; private static TableName loadTableName; /** * Util to get at the cluster. */ private static IntegrationTestingUtility util; /** * Executor for test threads. */ private static ExecutorService executorService; /** * All of the chaos monkey actions used. */ private static Action restartRSAction; private static Action restartMetaAction; private static Action moveMetaRegionsAction; private static Action moveRegionAction; private static Action restartMasterAction; /** * The load test tool used to create load and make sure that WALs aren't empty. */ private static LoadTestTool loadTool; @BeforeClass public static void setUp() throws Exception { // Set up the integration test util if (util == null) { util = new IntegrationTestingUtility(); } // Make sure there are three servers. util.initializeCluster(3); // Set up the load test tool. loadTool = new LoadTestTool(); loadTool.setConf(util.getConfiguration()); // Create executor with enough threads to restart rs's, // run scans, puts, admin ops and load test tool. executorService = Executors.newFixedThreadPool(8); // Set up the tables needed. setupTables(); // Set up the actions. sleepTime = util.getConfiguration().getLong(SLEEP_TIME_KEY, SLEEP_TIME_DEFAULT); setupActions(); } private static void setupActions() throws IOException { // allow a little more time for RS restart actions because RS start depends on having a master // to report to and the master is also being monkeyed. util.getConfiguration().setLong(Action.START_RS_TIMEOUT_KEY, 3 * 60 * 1000); // Set up the action that will restart a region server holding a region from our table // because this table should only have one region we should be good. restartRSAction = new RestartRsHoldingTableAction(sleepTime, util.getConnection().getRegionLocator(tableName)); // Set up the action that will kill the region server holding meta.
restartMetaAction = new RestartRsHoldingMetaAction(sleepTime); // Set up the action that will move the regions of meta. moveMetaRegionsAction = new MoveRegionsOfTableAction(sleepTime, MonkeyConstants.DEFAULT_MOVE_REGIONS_MAX_TIME, TableName.META_TABLE_NAME); // Set up the action that will move the regions of our table. moveRegionAction = new MoveRegionsOfTableAction(sleepTime, MonkeyConstants.DEFAULT_MOVE_REGIONS_MAX_TIME, tableName); // Kill the master restartMasterAction = new RestartActiveMasterAction(1000); // Give the action the access to the cluster. Action.ActionContext actionContext = new Action.ActionContext(util); restartRSAction.init(actionContext); restartMetaAction.init(actionContext); moveMetaRegionsAction.init(actionContext); moveRegionAction.init(actionContext); restartMasterAction.init(actionContext); } private static void setupTables() throws IOException { // Get the table name. tableName = TableName.valueOf(util.getConfiguration() .get("hbase.IntegrationTestMTTR.tableName", "IntegrationTestMTTR")); loadTableName = TableName.valueOf(util.getConfiguration() .get("hbase.IntegrationTestMTTR.loadTableName", "IntegrationTestMTTRLoadTestTool")); if (util.getHBaseAdmin().tableExists(tableName)) { util.deleteTable(tableName); } if (util.getHBaseAdmin().tableExists(loadTableName)) { util.deleteTable(loadTableName); } // Create the table. If this fails then fail everything. HTableDescriptor tableDescriptor = new HTableDescriptor(tableName); // Make the max file size huge so that splits don't happen during the test. tableDescriptor.setMaxFileSize(Long.MAX_VALUE); HColumnDescriptor descriptor = new HColumnDescriptor(FAMILY); descriptor.setMaxVersions(1); tableDescriptor.addFamily(descriptor); util.getHBaseAdmin().createTable(tableDescriptor); // Setup the table for LoadTestTool int ret = loadTool.run(new String[]{"-tn", loadTableName.getNameAsString(), "-init_only"}); assertEquals("Failed to initialize LoadTestTool", 0, ret); } @AfterClass public static void after() throws IOException { // Clean everything up. util.restoreCluster(); util = null; // Stop the threads so that we know everything is complete. executorService.shutdown(); executorService = null; // Clean up the actions. moveRegionAction = null; restartMetaAction = null; moveMetaRegionsAction = null; restartRSAction = null; restartMasterAction = null; loadTool = null; } @Test public void testRestartRsHoldingTable() throws Exception { run(new ActionCallable(restartRSAction), "RestartRsHoldingTableAction"); } @Test public void testKillRsHoldingMeta() throws Exception { run(new ActionCallable(restartMetaAction), "KillRsHoldingMeta"); } @Test public void testMoveMeta() throws Exception { run(new ActionCallable(moveMetaRegionsAction), "MoveMeta"); } @Test public void testMoveRegion() throws Exception { run(new ActionCallable(moveRegionAction), "MoveRegion"); } @Test public void testRestartMaster() throws Exception { run(new ActionCallable(restartMasterAction), "RestartMaster"); } public void run(Callable<Boolean> monkeyCallable, String testName) throws Exception { int maxIters = util.getHBaseClusterInterface().isDistributedCluster() ? 10 : 3; LOG.info("Starting " + testName + " with " + maxIters + " iterations."); // Array to keep track of times. 
ArrayList<TimingResult> resultPuts = new ArrayList<TimingResult>(maxIters); ArrayList<TimingResult> resultScan = new ArrayList<TimingResult>(maxIters); ArrayList<TimingResult> resultAdmin = new ArrayList<TimingResult>(maxIters); long start = System.nanoTime(); try { // We're going to try this multiple times for (int fullIterations = 0; fullIterations < maxIters; fullIterations++) { // Create and start executing a callable that will kill the servers Future<Boolean> monkeyFuture = executorService.submit(monkeyCallable); // Pass that future to the timing Callables. Future<TimingResult> putFuture = executorService.submit(new PutCallable(monkeyFuture)); Future<TimingResult> scanFuture = executorService.submit(new ScanCallable(monkeyFuture)); Future<TimingResult> adminFuture = executorService.submit(new AdminCallable(monkeyFuture)); Future<Boolean> loadFuture = executorService.submit(new LoadCallable(monkeyFuture)); monkeyFuture.get(); loadFuture.get(); // Get the values from the futures. TimingResult putTime = putFuture.get(); TimingResult scanTime = scanFuture.get(); TimingResult adminTime = adminFuture.get(); // Store the times to display later. resultPuts.add(putTime); resultScan.add(scanTime); resultAdmin.add(adminTime); // Wait some time for everything to settle down. Thread.sleep(5000l); } } catch (Exception e) { long runtimeMs = TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS); LOG.info(testName + " failed after " + runtimeMs + "ms.", e); throw e; } long runtimeMs = TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS); Objects.ToStringHelper helper = Objects.toStringHelper("MTTRResults") .add("putResults", resultPuts) .add("scanResults", resultScan) .add("adminResults", resultAdmin) .add("totalRuntimeMs", runtimeMs) .add("name", testName); // Log the info LOG.info(helper.toString()); } /** * Class to store results of TimingCallable. * * Stores times and trace id. */ private static class TimingResult { DescriptiveStatistics stats = new DescriptiveStatistics(); ArrayList<Long> traces = new ArrayList<Long>(10); /** * Add a result to this aggregate result. * @param time Time in nanoseconds * @param span Span. To be kept if the time taken was over 1 second */ public void addResult(long time, Span span) { stats.addValue(TimeUnit.MILLISECONDS.convert(time, TimeUnit.NANOSECONDS)); if (TimeUnit.SECONDS.convert(time, TimeUnit.NANOSECONDS) >= 1) { traces.add(span.getTraceId()); } } @Override public String toString() { Objects.ToStringHelper helper = Objects.toStringHelper(this) .add("numResults", stats.getN()) .add("minTime", stats.getMin()) .add("meanTime", stats.getMean()) .add("maxTime", stats.getMax()) .add("25th", stats.getPercentile(25)) .add("50th", stats.getPercentile(50)) .add("75th", stats.getPercentile(75)) .add("90th", stats.getPercentile(90)) .add("95th", stats.getPercentile(95)) .add("99th", stats.getPercentile(99)) .add("99.9th", stats.getPercentile(99.9)) .add("99.99th", stats.getPercentile(99.99)) .add("traces", traces); return helper.toString(); } } /** * Base class for actions that need to record the time needed to recover from a failure. 
*/ static abstract class TimingCallable implements Callable<TimingResult> { protected final Future<?> future; public TimingCallable(Future<?> f) { future = f; } @Override public TimingResult call() throws Exception { TimingResult result = new TimingResult(); final int maxIterations = 10; int numAfterDone = 0; int resetCount = 0; // Keep trying until the rs is back up and we've gotten a put through while (numAfterDone < maxIterations) { long start = System.nanoTime(); TraceScope scope = null; try { scope = Trace.startSpan(getSpanName(), AlwaysSampler.INSTANCE); boolean actionResult = doAction(); if (actionResult && future.isDone()) { numAfterDone++; } // the following Exceptions derive from DoNotRetryIOException. They are considered // fatal for the purpose of this test. If we see one of these, it means something is // broken and needs investigation. This is not the case for all children of DNRIOE. // Unfortunately, this is an explicit enumeration and will need to be periodically refreshed. // See HBASE-9655 for further discussion. } catch (AccessDeniedException e) { throw e; } catch (CoprocessorException e) { throw e; } catch (FatalConnectionException e) { throw e; } catch (InvalidFamilyOperationException e) { throw e; } catch (NamespaceExistException e) { throw e; } catch (NamespaceNotFoundException e) { throw e; } catch (NoSuchColumnFamilyException e) { throw e; } catch (TableExistsException e) { throw e; } catch (TableNotFoundException e) { throw e; } catch (RetriesExhaustedException e){ throw e; // Everything else is potentially recoverable on the application side. For instance, a CM // action kills the RS that hosted a scanner the client was using. Continued use of that // scanner should be terminated, but a new scanner can be created and the read attempted // again. } catch (Exception e) { resetCount++; if (resetCount < maxIterations) { LOG.info("Non-fatal exception while running " + this.toString() + ". Resetting loop counter", e); numAfterDone = 0; } else { LOG.info("Too many unexpected Exceptions. Aborting.", e); throw e; } } finally { if (scope != null) { scope.close(); } } // Guard against a null scope in case Trace.startSpan itself threw. if (scope != null) { result.addResult(System.nanoTime() - start, scope.getSpan()); } } return result; } protected abstract boolean doAction() throws Exception; protected String getSpanName() { return this.getClass().getSimpleName(); } @Override public String toString() { return this.getSpanName(); } } /** * Callable that will keep putting small amounts of data into a table * until the future supplied returns. It keeps track of the max time. */ static class PutCallable extends TimingCallable { private final Table table; public PutCallable(Future<?> f) throws IOException { super(f); this.table = util.getConnection().getTable(tableName); } @Override protected boolean doAction() throws Exception { Put p = new Put(Bytes.toBytes(RandomStringUtils.randomAlphanumeric(5))); p.add(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5))); table.put(p); return true; } @Override protected String getSpanName() { return "MTTR Put Test"; } } /** * Callable that will keep scanning for small amounts of data until the * supplied future returns. Returns the max time taken to scan.
*/ static class ScanCallable extends TimingCallable { private final Table table; public ScanCallable(Future<?> f) throws IOException { super(f); this.table = util.getConnection().getTable(tableName); } @Override protected boolean doAction() throws Exception { ResultScanner rs = null; try { Scan s = new Scan(); s.setBatch(2); s.addFamily(FAMILY); s.setFilter(new KeyOnlyFilter()); s.setMaxVersions(1); rs = table.getScanner(s); Result result = rs.next(); return result != null && result.size() > 0; } finally { if (rs != null) { rs.close(); } } } @Override protected String getSpanName() { return "MTTR Scan Test"; } } /** * Callable that will keep going to the master for cluster status. Returns the max time taken. */ static class AdminCallable extends TimingCallable { public AdminCallable(Future<?> f) throws IOException { super(f); } @Override protected boolean doAction() throws Exception { Admin admin = null; try { admin = util.getHBaseAdmin(); ClusterStatus status = admin.getClusterStatus(); return status != null; } finally { if (admin != null) { admin.close(); } } } @Override protected String getSpanName() { return "MTTR Admin Test"; } } static class ActionCallable implements Callable<Boolean> { private final Action action; public ActionCallable(Action action) { this.action = action; } @Override public Boolean call() throws Exception { this.action.perform(); return true; } } /** * Callable used to make sure the cluster has some load on it. * This callable uses LoadTestTool to keep writing data to the load table until the supplied * future completes. */ public static class LoadCallable implements Callable<Boolean> { private final Future<?> future; public LoadCallable(Future<?> f) { future = f; } @Override public Boolean call() throws Exception { int colsPerKey = 10; int numServers = util.getHBaseClusterInterface().getInitialClusterStatus().getServersSize(); int numKeys = numServers * 5000; int writeThreads = 10; // Loop until the chaos monkey future is done, // but always run at least once in case the action completes quickly. do { int ret = loadTool.run(new String[]{ "-tn", loadTableName.getNameAsString(), "-write", String.format("%d:%d:%d", colsPerKey, 500, writeThreads), "-num_keys", String.valueOf(numKeys), "-skip_init" }); assertEquals("Load failed", 0, ret); } while (!future.isDone()); return true; } } }
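The pattern TimingCallable implements — probe repeatedly while a failure is injected, record every latency, and treat the longest observed response as the recovery time — is worth seeing in isolation. Below is a minimal, HBase-free sketch of that loop; RecoveryTimer, probe, and failureDone are hypothetical names invented for this example, not part of the test above.

import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class RecoveryTimer {

  /**
   * Repeatedly invokes the probe while the failure-injection task runs,
   * recording every round trip. Because a blocked client call simply takes
   * longer, the maximum observed latency approximates the recovery time.
   */
  public static long measureRecoveryMillis(Callable<Boolean> probe,
                                           Future<?> failureDone) {
    long maxMillis = 0;
    boolean succeededAfterDone = false;
    while (!succeededAfterDone) {
      long start = System.nanoTime();
      boolean ok;
      try {
        ok = probe.call();
      } catch (Exception e) {
        ok = false; // transient failure: keep probing
      }
      long elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
      maxMillis = Math.max(maxMillis, elapsed);
      // Stop only once the failure has finished AND a probe has succeeded.
      succeededAfterDone = ok && failureDone.isDone();
    }
    return maxMillis;
  }
}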
hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.mttr; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.math.stat.descriptive.DescriptiveStatistics; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.testclassification.IntegrationTests; import org.apache.hadoop.hbase.InvalidFamilyOperationException; import org.apache.hadoop.hbase.NamespaceExistException; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.chaos.actions.Action; import org.apache.hadoop.hbase.chaos.actions.MoveRegionsOfTableAction; import org.apache.hadoop.hbase.chaos.actions.RestartActiveMasterAction; import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingMetaAction; import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingTableAction; import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.RetriesExhaustedException; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.coprocessor.CoprocessorException; import org.apache.hadoop.hbase.filter.KeyOnlyFilter; import org.apache.hadoop.hbase.ipc.FatalConnectionException; import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException; import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.htrace.Span; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; import org.apache.htrace.impl.AlwaysSampler; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import com.google.common.base.Objects; /** * Integration test that should benchmark how fast HBase 
can recover from failures. This test starts * different threads: * <ol> * <li> * Load Test Tool.<br/> * This runs so that all RegionServers will have some load and WALs will be full. * </li> * <li> * Scan thread.<br/> * This thread runs a very short scan over and over again recording how long it takes to respond. * The longest response is assumed to be the time it took to recover. * </li> * <li> * Put thread.<br/> * This thread is just like the scan thread, except it does a very small put. * </li> * <li> * Admin thread. <br/> * This thread will continually go to the master to try and get the cluster status. Just like the * put and scan threads, the time to respond is recorded. * </li> * <li> * Chaos Monkey thread.<br/> * This thread runs a ChaosMonkey.Action. * </li> * </ol> * <p/> * The ChaosMonkey actions currently run are: * <ul> * <li>Restart the RegionServer holding meta.</li> * <li>Restart the RegionServer holding the table the scan and put threads are targeting.</li> * <li>Move the Regions of the table used by the scan and put threads.</li> * <li>Restart the master.</li> * </ul> * <p/> * At the end of the test a log line is output on the INFO level containing the timing data that was * collected. */ @Category(IntegrationTests.class) public class IntegrationTestMTTR { /** * Constants. */ private static final byte[] FAMILY = Bytes.toBytes("d"); private static final Log LOG = LogFactory.getLog(IntegrationTestMTTR.class); private static long sleepTime; private static final String SLEEP_TIME_KEY = "hbase.IntegrationTestMTTR.sleeptime"; private static final long SLEEP_TIME_DEFAULT = 60 * 1000l; /** * Configurable table names. */ private static TableName tableName; private static TableName loadTableName; /** * Util to get at the cluster. */ private static IntegrationTestingUtility util; /** * Executor for test threads. */ private static ExecutorService executorService; /** * All of the chaos monkey actions used. */ private static Action restartRSAction; private static Action restartMetaAction; private static Action moveRegionAction; private static Action restartMasterAction; /** * The load test tool used to create load and make sure that WALs aren't empty. */ private static LoadTestTool loadTool; @BeforeClass public static void setUp() throws Exception { // Set up the integration test util if (util == null) { util = new IntegrationTestingUtility(); } // Make sure there are three servers. util.initializeCluster(3); // Set up the load test tool. loadTool = new LoadTestTool(); loadTool.setConf(util.getConfiguration()); // Create executor with enough threads to restart rs's, // run scans, puts, admin ops and load test tool. executorService = Executors.newFixedThreadPool(8); // Set up the tables needed. setupTables(); // Set up the actions. sleepTime = util.getConfiguration().getLong(SLEEP_TIME_KEY, SLEEP_TIME_DEFAULT); setupActions(); } private static void setupActions() throws IOException { // allow a little more time for RS restart actions because RS start depends on having a master // to report to and the master is also being monkeyed. util.getConfiguration().setLong(Action.START_RS_TIMEOUT_KEY, 3 * 60 * 1000); // Set up the action that will restart a region server holding a region from our table; // because this table should only have one region, we should be good. restartRSAction = new RestartRsHoldingTableAction(sleepTime, util.getConnection().getRegionLocator(tableName)); // Set up the action that will kill the region server holding meta.
restartMetaAction = new RestartRsHoldingMetaAction(sleepTime); // Set up the action that will move the regions of our table. moveRegionAction = new MoveRegionsOfTableAction(sleepTime, MonkeyConstants.DEFAULT_MOVE_REGIONS_MAX_TIME, tableName); // Kill the master restartMasterAction = new RestartActiveMasterAction(1000); // Give the actions access to the cluster. Action.ActionContext actionContext = new Action.ActionContext(util); restartRSAction.init(actionContext); restartMetaAction.init(actionContext); moveRegionAction.init(actionContext); restartMasterAction.init(actionContext); } private static void setupTables() throws IOException { // Get the table name. tableName = TableName.valueOf(util.getConfiguration() .get("hbase.IntegrationTestMTTR.tableName", "IntegrationTestMTTR")); loadTableName = TableName.valueOf(util.getConfiguration() .get("hbase.IntegrationTestMTTR.loadTableName", "IntegrationTestMTTRLoadTestTool")); if (util.getHBaseAdmin().tableExists(tableName)) { util.deleteTable(tableName); } if (util.getHBaseAdmin().tableExists(loadTableName)) { util.deleteTable(loadTableName); } // Create the table. If this fails then fail everything. HTableDescriptor tableDescriptor = new HTableDescriptor(tableName); // Make the max file size huge so that splits don't happen during the test. tableDescriptor.setMaxFileSize(Long.MAX_VALUE); HColumnDescriptor descriptor = new HColumnDescriptor(FAMILY); descriptor.setMaxVersions(1); tableDescriptor.addFamily(descriptor); util.getHBaseAdmin().createTable(tableDescriptor); // Set up the table for LoadTestTool int ret = loadTool.run(new String[]{"-tn", loadTableName.getNameAsString(), "-init_only"}); assertEquals("Failed to initialize LoadTestTool", 0, ret); } @AfterClass public static void after() throws IOException { // Clean everything up. util.restoreCluster(); util = null; // Stop the threads so that we know everything is complete. executorService.shutdown(); executorService = null; // Clean up the actions. moveRegionAction = null; restartMetaAction = null; restartRSAction = null; restartMasterAction = null; loadTool = null; } @Test public void testRestartRsHoldingTable() throws Exception { run(new ActionCallable(restartRSAction), "RestartRsHoldingTableAction"); } @Test public void testKillRsHoldingMeta() throws Exception { run(new ActionCallable(restartMetaAction), "KillRsHoldingMeta"); } @Test public void testMoveRegion() throws Exception { run(new ActionCallable(moveRegionAction), "MoveRegion"); } @Test public void testRestartMaster() throws Exception { run(new ActionCallable(restartMasterAction), "RestartMaster"); } public void run(Callable<Boolean> monkeyCallable, String testName) throws Exception { int maxIters = util.getHBaseClusterInterface().isDistributedCluster() ? 10 : 3; LOG.info("Starting " + testName + " with " + maxIters + " iterations."); // Array to keep track of times. ArrayList<TimingResult> resultPuts = new ArrayList<TimingResult>(maxIters); ArrayList<TimingResult> resultScan = new ArrayList<TimingResult>(maxIters); ArrayList<TimingResult> resultAdmin = new ArrayList<TimingResult>(maxIters); long start = System.nanoTime(); try { // We're going to try this multiple times for (int fullIterations = 0; fullIterations < maxIters; fullIterations++) { // Create and start executing a callable that will kill the servers Future<Boolean> monkeyFuture = executorService.submit(monkeyCallable); // Pass that future to the timing Callables.
Future<TimingResult> putFuture = executorService.submit(new PutCallable(monkeyFuture)); Future<TimingResult> scanFuture = executorService.submit(new ScanCallable(monkeyFuture)); Future<TimingResult> adminFuture = executorService.submit(new AdminCallable(monkeyFuture)); Future<Boolean> loadFuture = executorService.submit(new LoadCallable(monkeyFuture)); monkeyFuture.get(); loadFuture.get(); // Get the values from the futures. TimingResult putTime = putFuture.get(); TimingResult scanTime = scanFuture.get(); TimingResult adminTime = adminFuture.get(); // Store the times to display later. resultPuts.add(putTime); resultScan.add(scanTime); resultAdmin.add(adminTime); // Wait some time for everything to settle down. Thread.sleep(5000l); } } catch (Exception e) { long runtimeMs = TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS); LOG.info(testName + " failed after " + runtimeMs + "ms.", e); throw e; } long runtimeMs = TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS); Objects.ToStringHelper helper = Objects.toStringHelper("MTTRResults") .add("putResults", resultPuts) .add("scanResults", resultScan) .add("adminResults", resultAdmin) .add("totalRuntimeMs", runtimeMs) .add("name", testName); // Log the info LOG.info(helper.toString()); } /** * Class to store results of TimingCallable. * * Stores times and trace id. */ private static class TimingResult { DescriptiveStatistics stats = new DescriptiveStatistics(); ArrayList<Long> traces = new ArrayList<Long>(10); /** * Add a result to this aggregate result. * @param time Time in nanoseconds * @param span Span. To be kept if the time taken was over 1 second */ public void addResult(long time, Span span) { stats.addValue(TimeUnit.MILLISECONDS.convert(time, TimeUnit.NANOSECONDS)); if (TimeUnit.SECONDS.convert(time, TimeUnit.NANOSECONDS) >= 1) { traces.add(span.getTraceId()); } } @Override public String toString() { Objects.ToStringHelper helper = Objects.toStringHelper(this) .add("numResults", stats.getN()) .add("minTime", stats.getMin()) .add("meanTime", stats.getMean()) .add("maxTime", stats.getMax()) .add("25th", stats.getPercentile(25)) .add("50th", stats.getPercentile(50)) .add("75th", stats.getPercentile(75)) .add("90th", stats.getPercentile(90)) .add("95th", stats.getPercentile(95)) .add("99th", stats.getPercentile(99)) .add("99.9th", stats.getPercentile(99.9)) .add("99.99th", stats.getPercentile(99.99)) .add("traces", traces); return helper.toString(); } } /** * Base class for actions that need to record the time needed to recover from a failure. */ static abstract class TimingCallable implements Callable<TimingResult> { protected final Future<?> future; public TimingCallable(Future<?> f) { future = f; } @Override public TimingResult call() throws Exception { TimingResult result = new TimingResult(); final int maxIterations = 10; int numAfterDone = 0; int resetCount = 0; // Keep trying until the rs is back up and we've gotten a put through while (numAfterDone < maxIterations) { long start = System.nanoTime(); TraceScope scope = null; try { scope = Trace.startSpan(getSpanName(), AlwaysSampler.INSTANCE); boolean actionResult = doAction(); if (actionResult && future.isDone()) { numAfterDone++; } // the following Exceptions derive from DoNotRetryIOException. They are considered // fatal for the purpose of this test. If we see one of these, it means something is // broken and needs investigation. This is not the case for all children of DNRIOE. 
// Unfortunately, this is an explicit enumeration and will need to be periodically refreshed. // See HBASE-9655 for further discussion. } catch (AccessDeniedException e) { throw e; } catch (CoprocessorException e) { throw e; } catch (FatalConnectionException e) { throw e; } catch (InvalidFamilyOperationException e) { throw e; } catch (NamespaceExistException e) { throw e; } catch (NamespaceNotFoundException e) { throw e; } catch (NoSuchColumnFamilyException e) { throw e; } catch (TableExistsException e) { throw e; } catch (TableNotFoundException e) { throw e; } catch (RetriesExhaustedException e){ throw e; // Everything else is potentially recoverable on the application side. For instance, a CM // action kills the RS that hosted a scanner the client was using. Continued use of that // scanner should be terminated, but a new scanner can be created and the read attempted // again. } catch (Exception e) { resetCount++; if (resetCount < maxIterations) { LOG.info("Non-fatal exception while running " + this.toString() + ". Resetting loop counter", e); numAfterDone = 0; } else { LOG.info("Too many unexpected Exceptions. Aborting.", e); throw e; } } finally { if (scope != null) { scope.close(); } } // Guard against a null scope in case Trace.startSpan itself threw. if (scope != null) { result.addResult(System.nanoTime() - start, scope.getSpan()); } } return result; } protected abstract boolean doAction() throws Exception; protected String getSpanName() { return this.getClass().getSimpleName(); } @Override public String toString() { return this.getSpanName(); } } /** * Callable that will keep putting small amounts of data into a table * until the future supplied returns. It keeps track of the max time. */ static class PutCallable extends TimingCallable { private final Table table; public PutCallable(Future<?> f) throws IOException { super(f); this.table = util.getConnection().getTable(tableName); } @Override protected boolean doAction() throws Exception { Put p = new Put(Bytes.toBytes(RandomStringUtils.randomAlphanumeric(5))); p.add(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5))); table.put(p); return true; } @Override protected String getSpanName() { return "MTTR Put Test"; } } /** * Callable that will keep scanning for small amounts of data until the * supplied future returns. Returns the max time taken to scan. */ static class ScanCallable extends TimingCallable { private final Table table; public ScanCallable(Future<?> f) throws IOException { super(f); this.table = util.getConnection().getTable(tableName); } @Override protected boolean doAction() throws Exception { ResultScanner rs = null; try { Scan s = new Scan(); s.setBatch(2); s.addFamily(FAMILY); s.setFilter(new KeyOnlyFilter()); s.setMaxVersions(1); rs = table.getScanner(s); Result result = rs.next(); return result != null && result.size() > 0; } finally { if (rs != null) { rs.close(); } } } @Override protected String getSpanName() { return "MTTR Scan Test"; } } /** * Callable that will keep going to the master for cluster status. Returns the max time taken.
*/ static class AdminCallable extends TimingCallable { public AdminCallable(Future<?> f) throws IOException { super(f); } @Override protected boolean doAction() throws Exception { Admin admin = null; try { admin = util.getHBaseAdmin(); ClusterStatus status = admin.getClusterStatus(); return status != null; } finally { if (admin != null) { admin.close(); } } } @Override protected String getSpanName() { return "MTTR Admin Test"; } } static class ActionCallable implements Callable<Boolean> { private final Action action; public ActionCallable(Action action) { this.action = action; } @Override public Boolean call() throws Exception { this.action.perform(); return true; } } /** * Callable used to make sure the cluster has some load on it. * This callable uses LoadTestTool to keep writing data to the load table until the supplied * future completes. */ public static class LoadCallable implements Callable<Boolean> { private final Future<?> future; public LoadCallable(Future<?> f) { future = f; } @Override public Boolean call() throws Exception { int colsPerKey = 10; int numServers = util.getHBaseClusterInterface().getInitialClusterStatus().getServersSize(); int numKeys = numServers * 5000; int writeThreads = 10; // Loop until the chaos monkey future is done, // but always run at least once in case the action completes quickly. do { int ret = loadTool.run(new String[]{ "-tn", loadTableName.getNameAsString(), "-write", String.format("%d:%d:%d", colsPerKey, 500, writeThreads), "-num_keys", String.valueOf(numKeys), "-skip_init" }); assertEquals("Load failed", 0, ret); } while (!future.isDone()); return true; } } }
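TimingResult above delegates its summary to Commons Math's DescriptiveStatistics. As a quick orientation, here is a self-contained sketch of that API with invented latency samples; only the calls TimingResult already uses appear.

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

public class StatsSketch {
  public static void main(String[] args) {
    DescriptiveStatistics stats = new DescriptiveStatistics();
    // Invented latency samples in milliseconds; the one outlier stands in
    // for the slow call that spans the recovery window.
    for (long ms : new long[]{12, 15, 11, 13, 14, 16, 12, 13, 9000, 12}) {
      stats.addValue(ms);
    }
    // The same summary shape that TimingResult logs: count, min/mean/max,
    // and selected percentiles.
    System.out.println("n=" + stats.getN()
        + " min=" + stats.getMin()
        + " mean=" + stats.getMean()
        + " max=" + stats.getMax()
        + " p95=" + stats.getPercentile(95)
        + " p99=" + stats.getPercentile(99));
  }
}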
HBASE-13223 Add testMoveMeta to IntegrationTestMTTR Signed-off-by: Andrew Purtell <[email protected]>
hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
HBASE-13223 Add testMoveMeta to IntegrationTestMTTR
Java
apache-2.0
ce0879e91f98f4a733e63402abdb1af92512ff4a
0
rouazana/james,aduprat/james,aduprat/james,chibenwa/james,chibenwa/james,chibenwa/james,rouazana/james,rouazana/james,aduprat/james,chibenwa/james,aduprat/james,rouazana/james
/* ==================================================================== * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Apache", "Jakarta", "JAMES" and "Apache Software Foundation" * must not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * * Portions of this software are based upon public domain software * originally written at the National Center for Supercomputing Applications, * University of Illinois, Urbana-Champaign. */ package org.apache.james.transport.mailets; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Vector; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import org.apache.james.services.JamesUser; import org.apache.mailet.GenericMailet; import org.apache.mailet.Mail; import org.apache.mailet.MailAddress; import org.apache.mailet.MailetConfig; import org.apache.mailet.UsersRepository; /** * Receives a Mail from JamesSpoolManager and takes care of delivery of the * message to local inboxes. 
*/ public class LocalDelivery extends GenericMailet { private boolean enableAliases; private boolean enableForwarding; private boolean ignoreCase; private String inboxURI; private UsersRepository localusers; private String users; /** * Return a string describing this mailet. * * @return a string describing this mailet */ public String getMailetInfo() { return "Local Delivery Mailet"; } /** * @see org.apache.mailet.Mailet#init(org.apache.mailet.MailetConfig) */ public void init(MailetConfig newConfig) throws MessagingException { super.init(newConfig); if (newConfig.getInitParameter("inboxURI") != null) { inboxURI = newConfig.getInitParameter("inboxURI"); } else { log("No inboxURI defined for LocalDelivery"); } if (newConfig.getInitParameter("users") != null) { users = newConfig.getInitParameter("users"); localusers = getMailetContext().getUserRepository(users); } else { log("No users repository defined for LocalDelivery"); } if (newConfig.getInitParameter("ignoreCase") != null) { ignoreCase = Boolean.valueOf(newConfig.getInitParameter("ignoreCase")).booleanValue(); } else { ignoreCase = false; } if (newConfig.getInitParameter("enableAliases") != null) { enableAliases = Boolean.valueOf(newConfig.getInitParameter("enableAliases")).booleanValue(); } else { enableAliases = false; } if (newConfig.getInitParameter("enableForwarding") != null) { enableForwarding = Boolean.valueOf(newConfig.getInitParameter("enableForwarding")).booleanValue(); } else { enableForwarding = false; } } /* MimeMessage that does NOT change the headers when we save it */ class LocalMimeMessage extends MimeMessage { public LocalMimeMessage(MimeMessage source) throws MessagingException { super(source); } protected void updateHeaders() throws MessagingException { if (getMessageID() == null) super.updateHeaders(); } } /** * Delivers a mail to a local mailbox. 
* * @param mail the mail being processed * * @throws MessagingException if an error occurs while storing the mail */ public void service(Mail mail) throws MessagingException { // Validate the argument before dereferencing it. if (mail == null) { throw new IllegalArgumentException("Mail message to be stored cannot be null."); } Collection recipients = mail.getRecipients(); Collection errors = new Vector(); MimeMessage message = mail.getMessage(); for (Iterator i = recipients.iterator(); i.hasNext();) { MailAddress recipient = (MailAddress)i.next(); String username = null; if (recipient == null) { throw new IllegalArgumentException("Recipient for mail to be stored cannot be null."); } if (ignoreCase) { username = localusers.getRealName(recipient.getUser()); } else if (localusers.contains(recipient.getUser())) { username = recipient.getUser(); } if (username == null) { StringBuffer errorBuffer = new StringBuffer(128).append("The inbox for user ").append(recipient.getUser()).append( " was not found on this server."); throw new MessagingException(errorBuffer.toString()); } // Test the type before casting; casting first would defeat the instanceof check. if (localusers.getUserByName(username) instanceof JamesUser) { JamesUser user = (JamesUser) localusers.getUserByName(username); if (enableAliases || enableForwarding) { if (enableAliases && user.getAliasing()) { username = user.getAlias(); } // Forwarding takes precedence over local aliases if (enableForwarding && user.getForwarding()) { MailAddress forwardTo = user.getForwardingDestination(); if (forwardTo == null) { StringBuffer errorBuffer = new StringBuffer(128).append("Forwarding was enabled for ").append( username).append( " but no forwarding address was set for this account."); throw new MessagingException(errorBuffer.toString()); } recipients = new HashSet(); recipients.add(forwardTo); try { // Add qmail's de facto standard Delivered-To header MimeMessage localMessage = new LocalMimeMessage(message); localMessage.addHeader("Delivered-To", recipient.toString()); localMessage.saveChanges(); getMailetContext().sendMail(mail.getSender(), recipients, localMessage); StringBuffer logBuffer = new StringBuffer(128).append("Mail for ").append(username).append( " forwarded to ").append( forwardTo.toString()); log(logBuffer.toString()); return; } catch (MessagingException me) { StringBuffer logBuffer = new StringBuffer(128).append("Error forwarding mail to ").append( forwardTo.toString()).append( " attempting local delivery"); log(logBuffer.toString()); throw me; } } } } try { getMailetContext().getMailRepository(inboxURI + recipient.getUser() + "/").store(mail); } catch (Exception ex) { getMailetContext().log("Error while storing mail.", ex); errors.add(recipient); } } if (!errors.isEmpty()) { // If there were errors, we redirect the email to the ERROR processor. // In order for this server to meet the requirements of the SMTP specification, // mails on the ERROR processor must be returned to the sender. Note that this // email doesn't include any details regarding the failure(s). // In the future we may wish to address this. getMailetContext().sendMail(mail.getSender(), errors, message, Mail.ERROR); } // We always consume this message mail.setState(Mail.GHOST); } }
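The LocalMimeMessage inner class exists because MimeMessage.saveChanges() calls updateHeaders(), which by default assigns a fresh Message-ID; overriding it to call super only when no ID is present preserves the original header. A self-contained sketch of that behavior, assuming only standard JavaMail on the classpath (the class name KeepMessageId, the demo class, and the address are invented for this example):

import java.util.Properties;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.internet.MimeMessage;

public class KeepMessageIdDemo {

  /** Same idea as LocalDelivery's LocalMimeMessage: skip header regeneration
   *  when a Message-ID already exists. */
  static class KeepMessageId extends MimeMessage {
    KeepMessageId(MimeMessage source) throws MessagingException {
      super(source);
    }
    @Override
    protected void updateHeaders() throws MessagingException {
      // Only let JavaMail assign headers (including Message-ID) when none exists.
      if (getMessageID() == null) {
        super.updateHeaders();
      }
    }
  }

  public static void main(String[] args) throws Exception {
    Session session = Session.getInstance(new Properties());
    MimeMessage original = new MimeMessage(session);
    original.setText("hello");
    original.saveChanges(); // assigns a Message-ID
    String before = original.getMessageID();

    MimeMessage copy = new KeepMessageId(original);
    copy.addHeader("Delivered-To", "user@localhost"); // invented address
    copy.saveChanges(); // a plain MimeMessage would replace the Message-ID here
    System.out.println(before.equals(copy.getMessageID())); // expected: true
  }
}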
src/java/org/apache/james/transport/mailets/LocalDelivery.java
/* ==================================================================== * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Apache", "Jakarta", "JAMES" and "Apache Software Foundation" * must not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * * Portions of this software are based upon public domain software * originally written at the National Center for Supercomputing Applications, * University of Illinois, Urbana-Champaign. */ package org.apache.james.transport.mailets; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Vector; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import org.apache.james.services.JamesUser; import org.apache.mailet.GenericMailet; import org.apache.mailet.Mail; import org.apache.mailet.MailAddress; import org.apache.mailet.MailetConfig; import org.apache.mailet.UsersRepository; /** * Receives a Mail from JamesSpoolManager and takes care of delivery of the * message to local inboxes. 
*/ public class LocalDelivery extends GenericMailet { private boolean enableAliases; private boolean enableForwarding; private boolean ignoreCase; private String inboxURI; private UsersRepository localusers; private String users; /** * Return a string describing this mailet. * * @return a string describing this mailet */ public String getMailetInfo() { return "Local Delivery Mailet"; } /** * @see org.apache.mailet.Mailet#init(org.apache.mailet.MailetConfig) */ public void init(MailetConfig newConfig) throws MessagingException { super.init(newConfig); if (newConfig.getInitParameter("inboxURI") != null) { inboxURI = newConfig.getInitParameter("inboxURI"); } else { log("No inboxURI defined for LocalDelivery"); } if (newConfig.getInitParameter("users") != null) { users = newConfig.getInitParameter("users"); localusers = getMailetContext().getUserRepository(users); } else { log("No users repository defined for LocalDelivery"); } if (newConfig.getInitParameter("ignoreCase") != null) { ignoreCase = Boolean.valueOf(newConfig.getInitParameter("ignoreCase")).booleanValue(); } else { ignoreCase = false; } if (newConfig.getInitParameter("enableAliases") != null) { enableAliases = Boolean.valueOf(newConfig.getInitParameter("enableAliases")).booleanValue(); } else { enableAliases = false; } if (newConfig.getInitParameter("enableForwarding") != null) { enableForwarding = Boolean.valueOf(newConfig.getInitParameter("enableForwarding")).booleanValue(); } else { enableForwarding = false; } } /** * Delivers a mail to a local mailbox. * * @param mail the mail being processed * * @throws MessagingException if an error occurs while storing the mail */ public void service(Mail mail) throws MessagingException { // Validate the argument before dereferencing it. if (mail == null) { throw new IllegalArgumentException("Mail message to be stored cannot be null."); } Collection recipients = mail.getRecipients(); Collection errors = new Vector(); MimeMessage message = mail.getMessage(); for (Iterator i = recipients.iterator(); i.hasNext();) { MailAddress recipient = (MailAddress)i.next(); String username = null; if (recipient == null) { throw new IllegalArgumentException("Recipient for mail to be stored cannot be null."); } if (ignoreCase) { username = localusers.getRealName(recipient.getUser()); } else if (localusers.contains(recipient.getUser())) { username = recipient.getUser(); } if (username == null) { StringBuffer errorBuffer = new StringBuffer(128).append("The inbox for user ").append(recipient.getUser()).append( " was not found on this server."); throw new MessagingException(errorBuffer.toString()); } // Test the type before casting; casting first would defeat the instanceof check. if (localusers.getUserByName(username) instanceof JamesUser) { JamesUser user = (JamesUser) localusers.getUserByName(username); if (enableAliases || enableForwarding) { if (enableAliases && user.getAliasing()) { username = user.getAlias(); } // Forwarding takes precedence over local aliases if (enableForwarding && user.getForwarding()) { MailAddress forwardTo = user.getForwardingDestination(); if (forwardTo == null) { StringBuffer errorBuffer = new StringBuffer(128).append("Forwarding was enabled for ").append( username).append( " but no forwarding address was set for this account."); throw new MessagingException(errorBuffer.toString()); } recipients = new HashSet(); recipients.add(forwardTo); try { //Per RFC 1327 (?)
MimeMessage localMessage = new MimeMessage(message); localMessage.addHeader("Delivered-To", recipient.toString()); localMessage.saveChanges(); getMailetContext().sendMail(mail.getSender(), recipients, localMessage); StringBuffer logBuffer = new StringBuffer(128).append("Mail for ").append(username).append( " forwarded to ").append( forwardTo.toString()); log(logBuffer.toString()); return; } catch (MessagingException me) { StringBuffer logBuffer = new StringBuffer(128).append("Error forwarding mail to ").append( forwardTo.toString()).append( " attempting local delivery"); log(logBuffer.toString()); throw me; } } } } try { getMailetContext().getMailRepository(inboxURI + recipient.getUser() + "/").store(mail); } catch (Exception ex) { getMailetContext().log("Error while storing mail.", ex); errors.add(recipient); } } if (!errors.isEmpty()) { // If there were errors, we redirect the email to the ERROR processor. // In order for this server to meet the requirements of the SMTP specification, // mails on the ERROR processor must be returned to the sender. Note that this // email doesn't include any details regarding the failure(s). // In the future we may wish to address this. getMailetContext().sendMail(mail.getSender(), errors, message, Mail.ERROR); } // We always consume this message mail.setState(Mail.GHOST); } }
Prevent Message-ID from being changed if present git-svn-id: de9d04cf23151003780adc3e4ddb7078e3680318@108618 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/james/transport/mailets/LocalDelivery.java
Prevent Message-ID from being changed if present
Java
bsd-2-clause
3c6410b8b6efad62e63c26a5bfac985134ae969d
0
clementval/claw-compiler,clementval/claw-compiler
/* * This file is released under terms of BSD license * See LICENSE file for more information */ package cx2x.xcodeml.helper; import cx2x.xcodeml.exception.*; import cx2x.xcodeml.xelement.*; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.w3c.dom.Node; import org.w3c.dom.NamedNodeMap; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.Transformer; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.xpath.*; import java.io.File; import java.util.ArrayList; import java.util.List; /** * The class XelementHelper contains only static methods to help manipulate the * raw Elements in the XcodeML representation by using the abstracted Xelements. * * @author clementval */ public class XelementHelper { /** * Get a text attribute value from an element. * @param el The element in which the attribute is searched. * @param attrName The name of the attribute to be found. * @return The attribute's value if the attribute is found. Null otherwise. */ public static String getAttributeValue(XbaseElement el, String attrName){ if(el == null || el.getBaseElement() == null){ return null; } NamedNodeMap attributes = el.getBaseElement().getAttributes(); for (int j = 0; j < attributes.getLength(); j++) { if(attributes.item(j).getNodeName().equals(attrName)){ return attributes.item(j).getNodeValue(); } } return null; } /** * Get a boolean attribute value from an element. * @param el The element in which the attribute is searched. * @param attrName The name of the attribute to be found. * @return The attribute's value if the attribute is found. Null otherwise. */ public static boolean getBooleanAttributeValue(XbaseElement el, String attrName) { if (el == null || el.getBaseElement() == null) { return false; } String value = XelementHelper.getAttributeValue(el, attrName); return value != null && value.equals(XelementName.TRUE); } /** * Find a function definition according to a function call. * @param xcodeml The XcodeML program to search in. * @param fctCall The function call used to find the function definition. * @return A function definition element if found. Null otherwise. */ public static XfunctionDefinition findFunctionDefinition(XcodeProgram xcodeml, XfunctionCall fctCall) { if(xcodeml.getBaseElement() == null){ return null; } String name = fctCall.getName().getValue(); NodeList nList = xcodeml.getBaseElement().getElementsByTagName(XelementName.FCT_DEFINITION); for (int i = 0; i < nList.getLength(); i++) { Node fctDefNode = nList.item(i); if (fctDefNode.getNodeType() == Node.ELEMENT_NODE) { XbaseElement dummyFctDef = new XbaseElement((Element)fctDefNode); Xname fctDefName = findName(dummyFctDef, false); if(name != null && fctDefName.isIdentical(name)){ return new XfunctionDefinition(dummyFctDef.getBaseElement()); } } } return null; } /** * Find all array reference elements in a given body. * @param parent The body element to search for the array references. * @return A list of all array references found. */ public static List<XarrayRef> getAllArrayReferences(XbaseElement parent){ List<XarrayRef> references = new ArrayList<>(); NodeList nList = parent.getBaseElement().
getElementsByTagName(XelementName.F_ARRAY_REF); for (int i = 0; i < nList.getLength(); i++) { Node n = nList.item(i); if (n.getNodeType() == Node.ELEMENT_NODE) { Element el = (Element) n; XarrayRef ref = new XarrayRef(el); references.add(ref); } } return references; } /** * <pre> * Find all assignment statements from a node until the end pragma. * * This method uses XPath to select the correct nodes. XPath 1.0 does not have * an intersect operator, only union. Using the Kaysian method, it is possible * to express the intersection of two node sets. * * $set1[count(.|$set2)=count($set2)] * * In our case, we intersect all assign statements which are next siblings of * the "from" element with all the assign statements which are previous * siblings of the ending pragma. * </pre> * * @param from The element from which the search is initiated. * @param endPragma Value of the end pragma. Search will be performed until * there. * @return A list of all assign statements found. List is empty if no * statements are found. */ public static List<XassignStatement> getArrayAssignInBlock(XbaseElement from, String endPragma) { /* Define all the assign elements with array refs which are next siblings of * the "from" element */ String s1 = String.format( "following-sibling::%s[%s]", XelementName.F_ASSIGN_STMT, XelementName.F_ARRAY_REF ); /* Define all the assign elements with array refs which are previous siblings * of the end pragma element */ String s2 = String.format( "following-sibling::%s[text()=\"%s\"]/preceding-sibling::%s[%s]", XelementName.PRAGMA_STMT, endPragma, XelementName.F_ASSIGN_STMT, XelementName.F_ARRAY_REF ); // Use the Kaysian method to express the intersect operator String intersect = String.format("%s[count(.|%s)=count(%s)]", s1, s2, s2); List<XassignStatement> assignments = new ArrayList<>(); try { XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression xpathExpr = xpath.compile(intersect); NodeList output = (NodeList) xpathExpr.evaluate(from.getBaseElement(), XPathConstants.NODESET); for (int i = 0; i < output.getLength(); i++) { Element assign = (Element) output.item(i); assignments.add(new XassignStatement(assign)); } } catch (XPathExpressionException ignored) {} return assignments; } /** * Find all array references in the next children that match the given * criteria. * * This method uses powerful XPath expressions to locate the correct nodes in * the AST. * * Here is an example of such a query that returns all nodes that are array * references for the array "array6" with offsets of 0 and -1: * * //FarrayRef[varRef[Var[text()="array6"]] and arrayIndex and * arrayIndex[minusExpr[Var and FintConstant[text()="1"]]]] * * @param from The element from which the search is initiated. * @param identifier Identifier of the array. * @param offsets List of offsets to be searched for. * @return A list of all array references found.
*/ public static List<XarrayRef> getAllArrayReferencesByOffsets( XbaseElement from, String identifier, List<Integer> offsets) { String offsetXpath = ""; for (int i = 0; i < offsets.size(); ++i){ if(offsets.get(i) == 0){ offsetXpath += String.format("%s[position()=%s and %s]", XelementName.ARRAY_INDEX, i+1, XelementName.VAR ); } else if(offsets.get(i) > 0) { offsetXpath += String.format("%s[position()=%s and %s[%s and %s[text()=\"%s\"]]]", XelementName.ARRAY_INDEX, i+1, XelementName.MINUS_EXPR, XelementName.VAR, XelementName.F_INT_CONST, offsets.get(i)); } else { offsetXpath += String.format("%s[position()=%s and %s[%s and %s[text()=\"%s\"]]]", XelementName.ARRAY_INDEX, i+1, XelementName.MINUS_EXPR, XelementName.VAR, XelementName.F_INT_CONST, Math.abs(offsets.get(i))); } if(i != offsets.size()-1){ offsetXpath += " and "; } } // Start of the Xpath query String xpathQuery = String.format(".//%s[%s[%s[text()=\"%s\"]] and %s]", XelementName.F_ARRAY_REF, XelementName.VAR_REF, XelementName.VAR, identifier, offsetXpath ); List<XarrayRef> arrayRefs = new ArrayList<>(); try { XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression xpathExpr = xpath.compile(xpathQuery); NodeList output = (NodeList) xpathExpr.evaluate(from.getBaseElement(), XPathConstants.NODESET); for (int i = 0; i < output.getLength(); i++) { Element arrayRef = (Element) output.item(i); arrayRefs.add(new XarrayRef(arrayRef)); } } catch (XPathExpressionException ignored) { } return arrayRefs; } /** * Find all real constants in the direct children of the given parent. * @param parent Root element to search from. * @return A list of all found real constants. */ public static List<XrealConstant> getRealConstants(XbaseElement parent){ List<XrealConstant> elements = new ArrayList<>(); Node n = parent.getBaseElement().getFirstChild(); while(n != null){ if (n.getNodeType() == Node.ELEMENT_NODE) { Element el = (Element) n; if(el.getTagName().equals(XelementName.F_REAL_CONST)) { XrealConstant ref = new XrealConstant(el); elements.add(ref); } } n = n.getNextSibling(); } return elements; } /** * Insert a function call at the end of a do statement. * @param loop The do statement to insert in. * @param call The function call to be inserted. */ public static void insertFctCallIntoLoop(XdoStatement loop, XfunctionCall call){ loop.getBody().getBaseElement().appendChild(call.getBaseElement().getParentNode()); } /** * Find the function definition in the ancestors. * @param child The child element to search from. * @return A XfunctionDefinition object if found. Null otherwise. */ public static XfunctionDefinition findParentFctDef(XbaseElement child){ return findParentOfType(child, XfunctionDefinition.class); } /** * Find the do statement in which the child is included, if any. * @param child The child element to search from. * @return A XdoStatement object if found. Null otherwise. */ public static XdoStatement findParentDoStmt(XbaseElement child){ return findParentOfType(child, XdoStatement.class); } /** * Find a pragma element in the previous nodes containing a given keyword. * @param from Element to start from. * @param keyword Keyword to be found in the pragma. * @return The pragma if found. Null otherwise.
*/ public static Xpragma findPreviousPragma(XbaseElement from, String keyword){ if(from == null || from.getBaseElement() == null){ return null; } Node prev = from.getBaseElement().getPreviousSibling(); Node parent = from.getBaseElement(); do { while (prev != null) { if (prev.getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) prev; if (element.getTagName().equals(XelementName.PRAGMA_STMT) && element.getTextContent().toLowerCase(). contains(keyword.toLowerCase())) { return new Xpragma(element); } } prev = prev.getPreviousSibling(); } parent = parent.getParentNode(); prev = parent; } while(parent != null); return null; } /** * Find do statement element. * @param fctDef Function definition to search in. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A XdoStatement object if found. Null otherwise. */ public static XdoStatement findDoStatement(XfunctionDefinition fctDef, boolean any){ Xbody body = fctDef.getBody(); return XelementHelper.findDoStatement(body, any); } /** * Find var element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A Xvar object if found. Null otherwise. */ public static Xvar findVar(XbaseElement parent, boolean any){ return findXelement(parent, any, Xvar.class); } /** * Find varRef element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A XvarRef object if found. Null otherwise. */ public static XvarRef findVarRef(XbaseElement parent, boolean any){ return findXelement(parent, any, XvarRef.class); } /** * Find indexRange element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A XindexRange object if found. Null otherwise. */ public static XindexRange findIndexRange(XbaseElement parent, boolean any){ return findXelement(parent, any, XindexRange.class); } /** * Find arrayIndex element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A XarrayIndex object if found. Null otherwise. */ public static XarrayIndex findArrayIndex(XbaseElement parent, boolean any){ return findXelement(parent, any, XarrayIndex.class); } /** * Find name element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A Xname object if found. Null otherwise. */ public static Xname findName(XbaseElement parent, boolean any){ return findXelement(parent, any, Xname.class); } /** * Find value element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched for. * @return A Xvalue object if found. Null otherwise. */ public static Xvalue findValue(XbaseElement parent, boolean any){ return findXelement(parent, any, Xvalue.class); } /** * Find lValueModel element at given position. * @param parent Root element to search from. * @param position Position of the element to be found in the parent children * list. * @return A XLValueModel object if found. Null otherwise.
*/ public static XLValueModel findLValueModel(XbaseElement parent, int position){ Element element = getXthChildElement(parent.getBaseElement(), position); if(element == null){ return null; } switch (element.getTagName()) { case XelementName.VAR: return new XLValueModel(new Xvar(element)); case XelementName.F_ARRAY_REF: return new XLValueModel(new XarrayRef(element)); case XelementName.F_CHAR_REF: case XelementName.F_MEMBER_REF: case XelementName.F_COARRAY_REF: return null; // TODO when classes are available default: return null; } } /** * Find exprModel element at given position. * @param parent Root element to search from. * @param position Position of the element to be found in the parent children * list. * @return A XexprModel object if found. Null otherwise. */ public static XexprModel findExprModel(XbaseElement parent, int position){ /** An exprModel can be of the following type * - FintConstant, FrealConstant, FcomplexConstant, FcharacterConstant, * FlogicalConstant * TODO FarrayConstructor, FstructConstructor * - FarrayConstructor, FstructConstructor * - Var * TODO FcharacterRef, FmemberRef, FcoArrayRef * - FarrayRef, FcharacterRef, FmemberRef, FcoArrayRef, varRef * - functionCall * - plusExpr, minusExpr, mulExpr, divExpr, FpowerExpr, FconcatExpr * logEQExpr, logNEQExpr, logGEExpr, logGTExpr, logLEExpr, logLTExpr, * logAndExpr, logOrExpr, logEQVExpr, logNEQVExpr, logNotExpr, * unaryMinusExpr, userBinaryExpr, userUnaryExpr * TODO FdoLoop * - FdoLoop */ Element element = getXthChildElement(parent.getBaseElement(), position); if(element == null){ return null; } switch (element.getTagName()){ case XelementName.F_INT_CONST: return new XexprModel(new XintConstant(element)); case XelementName.F_REAL_CONST: return new XexprModel(new XrealConstant(element)); case XelementName.F_LOGICAL_CONST: return new XexprModel(new XlogicalConstant(element)); case XelementName.F_COMPLEX_CONST: return new XexprModel(new XcomplexConstant(element)); case XelementName.F_CHAR_CONST: return new XexprModel(new XcharacterConstant(element)); case XelementName.VAR: return new XexprModel(new Xvar(element)); case XelementName.FCT_CALL: return new XexprModel(new XfunctionCall(element)); case XelementName.F_ARRAY_REF: return new XexprModel(new XarrayRef(element)); case XelementName.VAR_REF: return new XexprModel(new XvarRef(element)); // binary expression case XelementName.DIV_EXPR: case XelementName.F_CONCAT_EXPR: case XelementName.F_POWER_EXPR: case XelementName.LOG_AND_EXPR: case XelementName.LOG_EQ_EXPR: case XelementName.LOG_EQV_EXPR: case XelementName.LOG_GE_EXPR: case XelementName.LOG_GT_EXPR: case XelementName.LOG_LE_EXPR: case XelementName.LOG_LT_EXPR: case XelementName.LOG_NEQ_EXPR: case XelementName.LOG_NEWV_EXPR: case XelementName.LOG_OR_EXPR: case XelementName.MINUS_EXPR: case XelementName.MUL_EXPR: case XelementName.PLUS_EXPR: case XelementName.USER_BINARY_EXPR: return new XexprModel(new XbinaryExpr(element)); // unary expression case XelementName.LOG_NOT_EXPR: case XelementName.UNARY_MINUS_EXPR: case XelementName.USER_UNARY_EXPR: return new XexprModel(new XunaryExpr(element)); default: return null; } } /** * The inner element of a varRef is one of the following: * - Var * - FmemberRef * - FarrayRef * - FcharacterRef * - FcoArrayRef * @param parent The root element to search form. * @return The varRef inner element as a XbaseElement derived type. 
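 * <p>Hypothetical usage ({@code varRef} is an assumed, already located
 * element):
 * <pre>{@code
 * XbaseElement inner = XelementHelper.findVarRefInnerElement(varRef);
 * if(inner instanceof XarrayRef){
 *   // Handle the array reference case.
 * }
 * }</pre>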
*/ public static XbaseElement findVarRefInnerElement(XbaseElement parent){ Element element = getFirstChildElement(parent.getBaseElement()); if(element == null){ return null; } switch (element.getTagName()) { case XelementName.VAR: return new Xvar(element); case XelementName.F_MEMBER_REF: return null; // TODO move to XmemberRef case XelementName.F_ARRAY_REF: return new XarrayRef(element); case XelementName.F_CHAR_REF: return null; // TODO move to XcharacterRef case XelementName.F_COARRAY_REF: return null; // TODO move to XcoArrayRef default: return null; } } /** * Find constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xconstant object if found. Null otherwise. */ public static Xconstant findConstant(XbaseElement parent, boolean any){ // FintConstant, FrealConstant, FcomplexConstant, FcharacterConstant, // FlogicalConstant XintConstant intConst = findIntConstant(parent, any); if(intConst != null){ return intConst; } XrealConstant realConst = findRealConstant(parent, any); if(realConst != null){ return realConst; } XcomplexConstant complexConst = findComplexConstant(parent, any); if(complexConst != null){ return complexConst; } XcharacterConstant charConst = findCharacterConstant(parent, any); if(charConst != null){ return charConst; } XlogicalConstant logConst = findLogicalConstant(parent, any); if(logConst != null){ return logConst; } return null; } /** * Find integer constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XintConstant object if found. Null otherwise. */ public static XintConstant findIntConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XintConstant.class); } /** * Find real constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XrealConstant object if found. Null otherwise. */ public static XrealConstant findRealConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XrealConstant.class); } /** * Find complex constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XcomplexConstant object if found. Null otherwise. */ public static XcomplexConstant findComplexConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XcomplexConstant.class); } /** * Find character constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XcharacterConstant object if found. Null otherwise. */ public static XcharacterConstant findCharacterConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XcharacterConstant.class); } /** * Find logical constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XlogicalConstant object if found. Null otherwise. */ public static XlogicalConstant findLogicalConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XlogicalConstant.class); } /** * Find condition element. 
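 * <p>Sketch of a typical lookup, assuming {@code ifStmt} wraps an if
 * statement whose condition is a direct child:
 * <pre>{@code
 * Xcondition cond = XelementHelper.findCondition(ifStmt, false);
 * }</pre>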
* @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xcondition object if found. Null otherwise. */ public static Xcondition findCondition(XbaseElement parent, boolean any){ return findXelement(parent, any, Xcondition.class); } /** * Find then element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xthen object if found. Null otherwise. */ public static Xthen findThen(XbaseElement parent, boolean any){ return findXelement(parent, any, Xthen.class); } /** * Find else element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xelse object if found. Null otherwise. */ public static Xelse findElse(XbaseElement parent, boolean any){ return findXelement(parent, any, Xelse.class); } /** * Find arguments element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XargumentsTable object if found. Null otherwise. */ public static XargumentsTable findArgumentsTable(XbaseElement parent, boolean any){ return findXelement(parent, any, XargumentsTable.class); } /** * Find lowerBound element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XlowerBound object if found. Null otherwise. */ public static XlowerBound findLowerBound(XbaseElement parent, boolean any){ return findXelement(parent, any, XlowerBound.class); } /** * Find upperBound element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XupperBound object if found. Null otherwise. */ public static XupperBound findUpperBound(XbaseElement parent, boolean any){ return findXelement(parent, any, XupperBound.class); } /** * Find step element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xstep object if found. Null otherwise. */ public static Xstep findStep(XbaseElement parent, boolean any){ return findXelement(parent, any, Xstep.class); } /** * Find body element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xbody object if found. Null otherwise. */ public static Xbody findBody(XbaseElement parent, boolean any){ return findXelement(parent, any, Xbody.class); } /** * Find do statement element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XdoStatement object if found. Null otherwise. */ public static XdoStatement findDoStatement(XbaseElement parent, boolean any){ return findXelement(parent, any, XdoStatement.class); } /** * Find the direct next do statement element. * @param from The element to search from. Direct next sibling is searched. * @return A XdoStatement object if it directly follows the given from * element. Null otherwise. 
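 * <p>Minimal sketch ({@code firstLoop} is an assumed element):
 * <pre>{@code
 * XdoStatement second = XelementHelper.findNextDoStatement(firstLoop);
 * if(second != null){
 *   // A sibling loop follows, e.g. a candidate for loop fusion.
 * }
 * }</pre>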
 */
  public static XdoStatement findNextDoStatement(XbaseElement from){
    return findNextElementOfType(from, XdoStatement.class);
  }

  /**
   * Find the direct next do statement element, stopping at a given element.
   * @param from  The element to search from. Direct next sibling is searched.
   * @param until The element to search until.
   * @return A XdoStatement object if it directly follows the given from
   * element. Null otherwise.
   */
  public static XdoStatement findNextDoStatement(XbaseElement from,
    XbaseElement until)
  {
    return findNextElementOfType(from, until, XdoStatement.class);
  }

  /**
   * Find symbols element.
   * @param parent Root element to search from.
   * @param any    If true, find in any nested element under parent. If
   *               false, only direct children are searched.
   * @return A XsymbolTable object if found. Null otherwise.
   */
  public static XsymbolTable findSymbols(XbaseElement parent, boolean any){
    return findXelement(parent, any, XsymbolTable.class);
  }

  /**
   * Find declarations element.
   * @param parent Root element to search from.
   * @param any    If true, find in any nested element under parent. If
   *               false, only direct children are searched.
   * @return A XdeclTable object if found. Null otherwise.
   */
  public static XdeclTable findDeclarations(XbaseElement parent, boolean any){
    return findXelement(parent, any, XdeclTable.class);
  }

  /**
   * Find type table element.
   * @param parent Root element to search from.
   * @param any    If true, find in any nested element under parent. If
   *               false, only direct children are searched.
   * @return A XtypeTable object if found. Null otherwise.
   */
  public static XtypeTable findTypeTable(XcodeProgram parent, boolean any){
    return findXelement(parent, any, XtypeTable.class);
  }

  /**
   * Find global symbols element in the XcodeML representation.
   * @param parent Root element to search from.
   * @param any    If true, find in any nested element under parent. If
   *               false, only direct children are searched.
   * @return A XglobalSymbolTable object if found. Null otherwise.
   */
  public static XglobalSymbolTable findGlobalSymbols(XcodeProgram parent,
    boolean any)
  {
    return findXelement(parent, any, XglobalSymbolTable.class);
  }

  /**
   * Find global declarations element in the XcodeML representation.
   * @param parent Root element to search from.
   * @param any    If true, find in any nested element under parent. If
   *               false, only direct children are searched.
   * @return A XglobalDeclTable object if found. Null otherwise.
   */
  public static XglobalDeclTable findGlobalDeclarations(XcodeProgram parent,
    boolean any)
  {
    return findXelement(parent, any, XglobalDeclTable.class);
  }

  /**
   * Find params in the XcodeML representation.
   * @param parent Root element to search from.
   * @param any    If true, find in any nested element under parent. If
   *               false, only direct children are searched.
   * @return A Xparams object if found. Null otherwise.
   */
  public static Xparams findParams(XbaseElement parent, boolean any){
    return findXelement(parent, any, Xparams.class);
  }

  /**
   * Find number of index ranges in an element.
   * @param parent Root element to search from.
   * @return The number of index ranges found.
   */
  public static int findNumberOfRange(XbaseElement parent){
    int indexCounter = 0;
    Node node = parent.getBaseElement().getFirstChild();
    while(node != null){
      if(node.getNodeType() == Node.ELEMENT_NODE){
        Element element = (Element)node;
        if(element.getTagName().equals(XelementName.INDEX_RANGE)){
          ++indexCounter;
        }
      }
      // Advance to the next sibling; the original loop never moved on and
      // therefore never terminated when a first child was present.
      node = node.getNextSibling();
    }
    return indexCounter;
  }

  /**
   * Find all the index elements (arrayIndex and indexRange) in an element.
   * @param parent Root element to search from.
* @return A list of all index ranges found. */ public static List<Xindex> findIndexes(XbaseElement parent){ List<Xindex> indexRanges = new ArrayList<>(); if(parent == null || parent.getBaseElement() == null){ return indexRanges; } Node node = parent.getBaseElement().getFirstChild(); while (node != null){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; switch (element.getTagName()){ case XelementName.ARRAY_INDEX: indexRanges.add(new XarrayIndex(element)); break; case XelementName.INDEX_RANGE: indexRanges.add(new XindexRange(element)); break; } } node = node.getNextSibling(); } return indexRanges; } /** * Find all the name elements in an element. * @param parent Root element to search from. * @return A list of all name elements found. */ public static List<Xname> findAllNames(XbaseElement parent){ List<Xname> names = new ArrayList<>(); if(parent == null || parent.getBaseElement() == null){ return names; } Node node = parent.getBaseElement().getFirstChild(); while (node != null){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; if(element.getTagName().equals(XelementName.NAME)) { names.add(new Xname(element)); } } node = node.getNextSibling(); } return names; } /** * Find len element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xlength object if found. Null otherwise. */ public static Xlength findLen(XbaseElement parent, boolean any){ return findXelement(parent, any, Xlength.class); } /** * Find kind element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xkind object if found. Null otherwise. */ public static Xkind findKind(XbaseElement parent, boolean any){ return findXelement(parent, any, Xkind.class); } /** * Find function call element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XfunctionCall object if found. Null otherwise. */ public static XfunctionCall findFctCall(XbaseElement parent, boolean any){ return findXelement(parent, any, XfunctionCall.class); } /** * Find a function call element nested in the given expression statement. * @param exprStmt The expression statement to search from. * @return A function call element if found. Null otherwise. */ public static XfunctionCall findFctCall(XexprStatement exprStmt){ if(exprStmt == null){ return null; } NodeList nodeList = exprStmt.getBaseElement().getChildNodes(); for (int i = 0; i < nodeList.getLength(); i++) { Node nextNode = nodeList.item(i); if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(XelementName.FCT_CALL)){ return new XfunctionCall(element); } } } return null; } /** * Find if there is a expr statement directly after the given element. * @param from The element to search from * @return An expr statement element if found. Null otherwise. */ public static XexprStatement findNextExprStatement(XbaseElement from){ return findNextElementOfType(from, XexprStatement.class); } /** * Find if there is a do statement directly after the given element. * @param from The element to search from * @return A do statement element if found. Null otherwise. 
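 * <p>Illustrative sketch; {@code pragma} is an assumption for an element
 * expected to be immediately followed by a loop:
 * <pre>{@code
 * XdoStatement loop = XelementHelper.findDirectNextDoStmt(pragma);
 * }</pre>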
*/ public static XdoStatement findDirectNextDoStmt(XbaseElement from){ return findDirectNextElement(from, XdoStatement.class); } /** * Find if there is an if statement directly after the given element. * @param from The element to search from * @return An if statement element if found. Null otherwise. */ public static XifStatement findDirectNextIfStmt(XbaseElement from){ return findDirectNextElement(from, XifStatement.class); } /** * Find if there is an assign statement directly after the given element. * @param from The element to search from * @return An assign statement element if found. Null otherwise. */ public static XassignStatement findDirectNextAssignStmt(XbaseElement from){ return findDirectNextElement(from, XassignStatement.class); } /** * Delete all the elements between the two given pragmas. * @param start The start pragma. Deletion start from next element. * @param end The end pragma. Deletion end just before this element. */ public static void deleteBetween(Xpragma start, Xpragma end){ ArrayList<Element> toDelete = new ArrayList<>(); Node node = start.getBaseElement().getNextSibling(); while (node != null && node != end.getBaseElement()){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; toDelete.add(element); } node = node.getNextSibling(); } for(Element e : toDelete){ delete(e); } } /** * Find all the pragma element in an XcodeML tree. * @param xcodeml The XcodeML program to search in. * @return A list of all pragmas found in the XcodeML program. */ public static List<Xpragma> findAllPragmas(XcodeProgram xcodeml){ NodeList pragmaList = xcodeml.getDocument() .getElementsByTagName(XelementName.PRAGMA_STMT); List<Xpragma> pragmas = new ArrayList<>(); for (int i = 0; i < pragmaList.getLength(); i++) { Node pragmaNode = pragmaList.item(i); if (pragmaNode.getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) pragmaNode; pragmas.add(new Xpragma(element)); } } return pragmas; } /** * Check if the two element are direct children of the same parent element. * @param e1 First element. * @param e2 Second element. * @return True if the two element are direct children of the same parent. * False otherwise. */ public static boolean hasSameParentBlock(XbaseElement e1, XbaseElement e2){ if(e1 == null || e2 == null || e1.getBaseElement() == null || e2.getBaseElement() == null) { return false; } return e1.getBaseElement().getParentNode() == e2.getBaseElement().getParentNode(); } /** * Insert all the statements from a given body at the end of another body * @param originalBody The body in which the extra body will be appended * @param extraBody The body that will be appended to the original body * @throws IllegalTransformationException if one of the body or their base * element is null. */ public static void appendBody(Xbody originalBody, Xbody extraBody) throws IllegalTransformationException { if(originalBody == null || originalBody.getBaseElement() == null || extraBody == null || extraBody.getBaseElement() == null) { throw new IllegalTransformationException("One of the body is null."); } // Append content of loop-body (loop) to this loop-body Node childNode = extraBody.getBaseElement().getFirstChild(); while(childNode != null){ Node nextChild = childNode.getNextSibling(); // Do something with childNode, including move or delete... if(childNode.getNodeType() == Node.ELEMENT_NODE){ originalBody.getBaseElement().appendChild(childNode); } childNode = nextChild; } } /** * Extract the body of a do statement and place it directly after it. 
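 * <p>Sketch of the intended effect, assuming {@code loop} contains the
 * statements S1 and S2 in its body:
 * <pre>{@code
 * // Before: do ... ; S1 ; S2 ; end do
 * XelementHelper.extractBody(loop);
 * // After:  do ... ; end do ; S1 ; S2
 * }</pre>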
 * @param loop The do statement containing the body to be extracted.
 */
  public static void extractBody(XdoStatement loop){
    Element loopElement = loop.getBaseElement();
    Element body = XelementHelper.findFirstElement(loopElement,
      XelementName.BODY);
    if(body == null){
      return;
    }
    Node refNode = loopElement;
    for(Node childNode = body.getFirstChild(); childNode != null;){
      Node nextChild = childNode.getNextSibling();
      // Move each element child right after the do statement, preserving
      // the original statement order.
      if(childNode.getNodeType() == Node.ELEMENT_NODE){
        XelementHelper.insertAfter(refNode, childNode);
        refNode = childNode;
      }
      childNode = nextChild;
    }
  }

  /**
   * Delete an element from the tree.
   * @param element Element to be deleted.
   */
  public static void delete(Element element){
    if(element == null || element.getParentNode() == null){
      return;
    }
    element.getParentNode().removeChild(element);
  }

  /**
   * Write the XcodeML to file or standard output.
   * @param xcodeml    The XcodeML to write in the output.
   * @param outputFile Path of the output file or null to output on std out.
   * @param indent     Number of spaces used for the indentation.
   * @return True if the output can be written without problems.
   *         False otherwise.
   */
  public static boolean writeXcodeML(XcodeProgram xcodeml, String outputFile,
    int indent)
  {
    try {
      XelementHelper.cleanEmptyTextNodes(xcodeml.getDocument());
      Transformer transformer =
        TransformerFactory.newInstance().newTransformer();
      transformer.setOutputProperty(OutputKeys.INDENT, "yes");
      transformer.setOutputProperty(
        "{http://xml.apache.org/xslt}indent-amount",
        Integer.toString(indent));
      DOMSource source = new DOMSource(xcodeml.getDocument());
      if(outputFile == null){
        // Output to console
        StreamResult console = new StreamResult(System.out);
        transformer.transform(source, console);
      } else {
        // Output to file
        StreamResult output = new StreamResult(new File(outputFile));
        transformer.transform(source, output);
      }
    } catch (TransformerException ex){
      // TransformerConfigurationException is a subclass of
      // TransformerException, so a single catch block covers both cases.
      xcodeml.addError("Cannot output file: " + ex.getMessage(), 0);
      return false;
    }
    return true;
  }

  /**
   * Remove text nodes that only contain whitespace. The conditions for
   * removing text nodes, besides only containing whitespace, are: if the
   * parent node has at least one child of any of the following types, all
   * whitespace-only text-node children will be removed: ELEMENT child,
   * CDATA child or COMMENT child.
   * @param parentNode Root node to start the cleaning.
   */
  public static void cleanEmptyTextNodes(Node parentNode) {
    boolean removeEmptyTextNodes = false;
    Node childNode = parentNode.getFirstChild();
    while (childNode != null) {
      removeEmptyTextNodes |= checkNodeTypes(childNode);
      childNode = childNode.getNextSibling();
    }
    if (removeEmptyTextNodes) {
      removeEmptyTextNodes(parentNode);
    }
  }

  /**
   * Validate a string attribute.
   * @param doc        Document in which the attribute must be validated.
   * @param attrValue  Attribute value expected.
   * @param xpathQuery XPath query to locate the attribute value.
   * @return True if the attribute validates. False otherwise.
   * @throws Exception if xpathQuery cannot be executed.
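 * <p>A hedged example; the XPath below is illustrative and assumes
 * {@code doc} holds a parsed XcodeML document:
 * <pre>{@code
 * boolean ok = XelementHelper.validateStringAttribute(doc, "1.0",
 *     "/XcodeProgram/@version");
 * }</pre>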
*/ public static boolean validateStringAttribute(Document doc, String attrValue , String xpathQuery) throws Exception { XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression getVersion = xpath.compile(xpathQuery); String outputValue = (String) getVersion.evaluate(doc, XPathConstants.STRING); return outputValue.equals(attrValue); } /** * Insert an element just before a reference element. * @param ref The reference element. * @param insert The element to be inserted. */ public static void insertBefore(XbaseElement ref, XbaseElement insert){ ref.getBaseElement().getParentNode().insertBefore(insert.getBaseElement(), ref.getBaseElement()); } /** * Insert an element just after a reference element. * @param refElement The reference element. * @param element The element to be inserted. */ public static void insertAfter(XbaseElement refElement, XbaseElement element){ XelementHelper.insertAfter(refElement.getBaseElement(), element.getBaseElement()); } /* * PRIVATE SECTION */ /** * Remove all empty text nodes in the subtree. * @param parentNode Root node to start the search. */ private static void removeEmptyTextNodes(Node parentNode) { Node childNode = parentNode.getFirstChild(); while (childNode != null) { // grab the "nextSibling" before the child node is removed Node nextChild = childNode.getNextSibling(); short nodeType = childNode.getNodeType(); if (nodeType == Node.TEXT_NODE) { boolean containsOnlyWhitespace = childNode.getNodeValue() .trim().isEmpty(); if (containsOnlyWhitespace) { parentNode.removeChild(childNode); } } childNode = nextChild; } } /** * Check the type of the given node. * @param childNode Node to be checked. * @return True if the node contains data. False otherwise. */ private static boolean checkNodeTypes(Node childNode) { short nodeType = childNode.getNodeType(); if (nodeType == Node.ELEMENT_NODE) { cleanEmptyTextNodes(childNode); // recurse into subtree } return nodeType == Node.ELEMENT_NODE || nodeType == Node.CDATA_SECTION_NODE || nodeType == Node.COMMENT_NODE; } /** * Insert a node directly after a reference node. * @param refNode The reference node. New node will be inserted after this * one. * @param newNode The new node to be inserted. */ private static void insertAfter(Node refNode, Node newNode){ refNode.getParentNode().insertBefore(newNode, refNode.getNextSibling()); } /** * Find an element of Class T in the nested elements under parent. * @param parent XbaseElement to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return An instance of T class if an element is found. Null if no element * is found. */ private static <T extends XbaseElement> T findXelement(XbaseElement parent, boolean any, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || parent == null || parent.getBaseElement() == null) { return null; } Element element = findElement(parent, elementName, any); if (element != null){ try{ return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } return null; } /** * Find any element of the the given Class in the direct children of from * element. Only first level children are search for. * @param from XbaseElement to search from. * @param xElementClass Element's class to be found. 
* @param <T> Derived class of XbaseElement * @return The first element found under from element. Null if no element is * found. */ private static <T extends XbaseElement> T findNextElementOfType( XbaseElement from, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || from.getBaseElement() == null){ return null; } Node nextNode = from.getBaseElement().getNextSibling(); while (nextNode != null){ if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(elementName)){ try { return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } } nextNode = nextNode.getNextSibling(); } return null; } /** * Find any element of the the given Class in the direct children of from * element until the end element is reached. * Only first level children are search for. * @param from XbaseElement to search from. * @param until XbaseElement to search until. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement * @return The first element found under from element. Null if no element is * found. */ private static <T extends XbaseElement> T findNextElementOfType( XbaseElement from, XbaseElement until, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || until == null || from.getBaseElement() == null) { return null; } Node nextNode = from.getBaseElement().getNextSibling(); while (nextNode != null){ if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element == until.getBaseElement()){ // End element is reached return null; } if(element.getTagName().equals(elementName)){ try { return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } } nextNode = nextNode.getNextSibling(); } return null; } /** * Find element of the the given Class that is directly after the given from * element. * @param from XbaseElement to search from. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return Instance of the xElementClass. Null if no element is found. */ private static <T extends XbaseElement> T findDirectNextElement( XbaseElement from, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || from.getBaseElement() == null){ return null; } Node nextNode = from.getBaseElement().getNextSibling(); while (nextNode != null){ if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(elementName)){ try{ return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } return null; } nextNode = nextNode.getNextSibling(); } return null; } /** * Find a parent element from a child in the ancestors. * @param from The child element to search from. * @return A XbaseElement object if found. Null otherwise. 
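 * @param xElementClass Element's class to be found.
 * @param <T> Derived class of XbaseElement.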
 */
  private static <T extends XbaseElement> T findParentOfType(
    XbaseElement from, Class<T> xElementClass)
  {
    String elementName = XelementName.getElementNameFromClass(xElementClass);
    if(elementName == null || from == null || from.getBaseElement() == null){
      return null;
    }
    Node parent = from.getBaseElement().getParentNode();
    // Walk up the ancestors until the document root is reached. The original
    // loop condition tested the immediate parent of the start element, which
    // never changes, and would dereference a null parent at the root.
    while(parent != null){
      if (parent.getNodeType() == Node.ELEMENT_NODE) {
        Element element = (Element) parent;
        if(element.getTagName().equals(elementName)){
          try{
            return xElementClass.
              getDeclaredConstructor(Element.class).newInstance(element);
          } catch(Exception ex){
            return null;
          }
        }
      }
      parent = parent.getParentNode();
    }
    return null;
  }

  /**
   * Find the first element with tag corresponding to elementName.
   * @param parent      The root element to search from.
   * @param elementName The tag of the element to search for.
   * @param any         If true, find in any nested element under parent. If
   *                    false, only direct children are searched.
   * @return First element found. Null if no element is found.
   */
  private static Element findElement(XbaseElement parent, String elementName,
    boolean any)
  {
    return findElement(parent.getBaseElement(), elementName, any);
  }

  /**
   * Find the first element with tag corresponding to elementName.
   * @param parent      The root element to search from.
   * @param elementName The tag of the element to search for.
   * @param any         If true, find in any nested element under parent. If
   *                    false, only direct children are searched.
   * @return First element found. Null if no element is found.
   */
  private static Element findElement(Element parent, String elementName,
    boolean any)
  {
    if(any){
      return findFirstElement(parent, elementName);
    } else {
      return findFirstChildElement(parent, elementName);
    }
  }

  /**
   * Find the first element with tag corresponding to elementName nested under
   * the parent element.
   * @param parent      The root element to search from.
   * @param elementName The tag of the element to search for.
   * @return The first element found under parent with the corresponding tag.
   *         Null if no element is found.
   */
  private static Element findFirstElement(XbaseElement parent,
    String elementName)
  {
    return findFirstElement(parent.getBaseElement(), elementName);
  }

  /**
   * Find the first element with tag corresponding to elementName nested under
   * the parent element.
   * @param parent      The root element to search from.
   * @param elementName The tag of the element to search for.
   * @return The first element found under parent with the corresponding tag.
   *         Null if no element is found.
   */
  private static Element findFirstElement(Element parent, String elementName){
    NodeList elements = parent.getElementsByTagName(elementName);
    if(elements.getLength() == 0){
      return null;
    }
    return (Element) elements.item(0);
  }

  /**
   * Find the first element with tag corresponding to elementName in the direct
   * children of the parent element.
   * @param parent      The root element to search from.
   * @param elementName The tag of the element to search for.
   * @return The first element found in the direct children of the element
   *         parent with the corresponding tag. Null if no element is found.
   */
  private static Element findFirstChildElement(Element parent,
    String elementName)
  {
    NodeList nodeList = parent.getChildNodes();
    for (int i = 0; i < nodeList.getLength(); i++) {
      Node nextNode = nodeList.item(i);
      if(nextNode.getNodeType() == Node.ELEMENT_NODE){
        Element element = (Element) nextNode;
        if(element.getTagName().equals(elementName)){
          return element;
        }
      }
    }
    return null;
  }

  /**
   * Get the first child element.
   * @param parent Root element to search from.
   * @return First found element. Null if there is no child element.
 */
  private static Element getFirstChildElement(Element parent){
    NodeList nodeList = parent.getChildNodes();
    for (int i = 0; i < nodeList.getLength(); i++) {
      Node nextNode = nodeList.item(i);
      if (nextNode.getNodeType() == Node.ELEMENT_NODE) {
        return (Element) nextNode;
      }
    }
    return null;
  }

  /**
   * Get the xth child element.
   * @param parent   Root element to search from.
   * @param position Position of the element to be found. Starts at 0.
   * @return Element found at position. Null if no element is found at the
   *         given position.
   */
  private static Element getXthChildElement(Element parent, int position){
    int crtIndex = 0;
    NodeList nodeList = parent.getChildNodes();
    for (int i = 0; i < nodeList.getLength(); i++) {
      Node nextNode = nodeList.item(i);
      if (nextNode.getNodeType() == Node.ELEMENT_NODE && crtIndex == position) {
        return (Element) nextNode;
      } else if (nextNode.getNodeType() == Node.ELEMENT_NODE){
        ++crtIndex;
      }
    }
    return null;
  }

  /**
   * Create an empty XbinaryExpr object with the given tag.
   * @param exprTag The tag associated with the specialized binary expression.
   * @param xcodeml The current XcodeML program.
   * @return A new XbinaryExpr object.
   * @throws IllegalTransformationException If the tag is not associated with
   *         any binary expression.
   */
  public static XbinaryExpr createEmpty(String exprTag, XcodeProgram xcodeml)
    throws IllegalTransformationException
  {
    if(XelementName.isBinaryExprTag(exprTag)){
      Element element = xcodeml.getDocument().createElement(exprTag);
      return new XbinaryExpr(element);
    }
    throw new IllegalTransformationException("No binary expression with tag:"
      + exprTag);
  }

  /**
   * Create an empty element of the given class in the given program.
   * @param xElementClass The class to be created.
   * @param xcodeml       The current XcodeML program.
   * @param <T>           Type of the class to be created.
   * @return An empty element of the given class.
   * @throws IllegalTransformationException if the element cannot be created.
   */
  public static <T extends XbaseElement> T createEmpty(Class<T> xElementClass,
    XcodeProgram xcodeml) throws IllegalTransformationException
  {
    String elementName = XelementName.getElementNameFromClass(xElementClass);
    if(elementName != null){
      Element element = xcodeml.getDocument().createElement(elementName);
      try {
        return xElementClass.
          getDeclaredConstructor(Element.class).newInstance(element);
      } catch(Exception ex){
        throw new IllegalTransformationException("Cannot create new statement: "
          + elementName);
      }
    }
    throw new IllegalTransformationException("Undefined element for class: "
      + xElementClass.toString());
  }

  /**
   * Construct a new empty XcodeML program.
   * @return A new XcodeProgram object with only the root element. Null if the
   *         document builder cannot be configured.
   */
  public static XcodeProgram createNewProgram(){
    try {
      DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
      DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
      Document doc = docBuilder.newDocument();
      // Attach the root element to the document; the original code created
      // the element but never appended it, leaving the document empty.
      doc.appendChild(doc.createElement(XelementName.X_CODE_PROGRAM));
      return new XcodeProgram(doc);
    } catch (ParserConfigurationException ex){
      return null;
    }
  }

  /**
   * Create a do statement element with an empty body in the given program.
   * @param xcodeml The current XcodeProgram in which the statement is created.
   * @param range   The iteration range to be applied to the do statement.
   * @return A new XdoStatement object with an empty body.
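 * <p>Minimal sketch, assuming {@code xcodeml} and {@code range} come from the
 * current transformation and {@code oldLoop} is an existing statement:
 * <pre>{@code
 * XdoStatement newLoop =
 *     XelementHelper.createWithEmptyBody(xcodeml, range);
 * XelementHelper.insertAfter(oldLoop, newLoop);
 * }</pre>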
*/ public static XdoStatement createWithEmptyBody(XcodeProgram xcodeml, XloopIterationRange range) { Element element = xcodeml.getDocument().createElement(XelementName.DO_STMT); if(range != null){ element.appendChild(range.getInductionVar().cloneNode()); element.appendChild(range.getIndexRange().cloneNode()); } Element body = xcodeml.getDocument().createElement(XelementName.BODY); element.appendChild(body); return new XdoStatement(element); } /** * Get the depth of an element in the AST. * @param element The element to start from. * @return A depth value >= 0. */ public static int getDepth(Element element) { Node parent = element.getParentNode(); int depth = 0; while(parent != null && parent.getNodeType() == Node.ELEMENT_NODE) { ++depth; parent = parent.getParentNode(); } return depth; } }
omni-cx2x/src/cx2x/xcodeml/helper/XelementHelper.java
/* * This file is released under terms of BSD license * See LICENSE file for more information */ package cx2x.xcodeml.helper; import cx2x.xcodeml.exception.*; import cx2x.xcodeml.xelement.*; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.w3c.dom.Node; import org.w3c.dom.NamedNodeMap; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.Transformer; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.xpath.*; import java.io.File; import java.util.ArrayList; import java.util.List; /** * The class XelementHelper contains only static method to help manipulating the * raw Elements in the XcodeML representation by using the abstracted Xelements. * * @author clementval */ public class XelementHelper { /** * Get a text attribute value from an element. * @param el The element in which the attribute is searched. * @param attrName The name of the attribute to be found. * @return The attribute's value if the attribute is found. Null otherwise. */ public static String getAttributeValue(XbaseElement el, String attrName){ if(el == null || el.getBaseElement() == null){ return null; } NamedNodeMap attributes = el.getBaseElement().getAttributes(); for (int j = 0; j < attributes.getLength(); j++) { if(attributes.item(j).getNodeName().equals(attrName)){ return attributes.item(j).getNodeValue(); } } return null; } /** * Get a boolean attribute value from an element. * @param el The element in which the attribute is searched. * @param attrName The name of the attribute to be found. * @return The attribute's value if the attribute is found. Null otherwise. */ public static boolean getBooleanAttributeValue(XbaseElement el, String attrName) { if (el == null || el.getBaseElement() == null) { return false; } String value = XelementHelper.getAttributeValue(el, attrName); return value != null && value.equals(XelementName.TRUE); } /** * Find a function definition according to a function call. * @param xcodeml The XcodeML program to search in. * @param fctCall The function call used to find the function definition. * @return A function definition element if found. Null otherwise. */ public static XfunctionDefinition findFunctionDefinition(XcodeProgram xcodeml, XfunctionCall fctCall) { if(xcodeml.getBaseElement() == null){ return null; } String name = fctCall.getName().getValue(); NodeList nList = xcodeml.getBaseElement().getElementsByTagName(XelementName.FCT_DEFINITION); for (int i = 0; i < nList.getLength(); i++) { Node fctDefNode = nList.item(i); if (fctDefNode.getNodeType() == Node.ELEMENT_NODE) { XbaseElement dummyFctDef = new XbaseElement((Element)fctDefNode); Xname fctDefName = findName(dummyFctDef, false); if(name != null && fctDefName.isIdentical(name)){ return new XfunctionDefinition(dummyFctDef.getBaseElement()); } } } return null; } /** * Find all array references elements in a given body. * @param parent The body element to search for the array references. * @return A list of all array references found. */ public static List<XarrayRef> getAllArrayReferences(XbaseElement parent){ List<XarrayRef> references = new ArrayList<>(); NodeList nList = parent.getBaseElement(). 
getElementsByTagName(XelementName.F_ARRAY_REF); for (int i = 0; i < nList.getLength(); i++) { Node n = nList.item(i); if (n.getNodeType() == Node.ELEMENT_NODE) { Element el = (Element) n; XarrayRef ref = new XarrayRef(el); references.add(ref); } } return references; } /** * Find all array references in the next children that match the given * criteria. * * This methods use powerful Xpath expression to locate the correct nodes in * the AST * * Here is an example of such a query that return all node that are array * references for the array "array6" with an offset of 0 -1 * * //FarrayRef[varRef[Var[text()="array6"]] and arrayIndex and * arrayIndex[minusExpr[Var and FintConstant[text()="1"]]]] * * @param from The element from which the search is initiated. * @param identifier Identifier of the array. * @param offsets List of offsets to be search for. * @return A list of all array references found. */ public static List<XarrayRef> getAllArrayReferencesByOffsets( XbaseElement from, String identifier, List<Integer> offsets) { String offsetXpath = ""; for (int i = 0; i < offsets.size(); ++i){ if(offsets.get(i) == 0){ offsetXpath += String.format("%s[position()=%s and %s]", XelementName.ARRAY_INDEX, i+1, XelementName.VAR ); } else if(offsets.get(i) > 0) { offsetXpath += String.format("%s[position()=%s and %s[%s and %s[text()=\"%s\"]]]", XelementName.ARRAY_INDEX, i+1, XelementName.MINUS_EXPR, XelementName.VAR, XelementName.F_INT_CONST, offsets.get(i)); } else { offsetXpath += String.format("%s[position()=%s and %s[%s and %s[text()=\"%s\"]]]", XelementName.ARRAY_INDEX, i+1, XelementName.MINUS_EXPR, XelementName.VAR, XelementName.F_INT_CONST, Math.abs(offsets.get(i))); } if(i != offsets.size()-1){ offsetXpath += " and "; } } // Start of the Xpath query String xpathQuery = String.format(".//%s[%s[%s[text()=\"%s\"]] and %s]", XelementName.F_ARRAY_REF, XelementName.VAR_REF, XelementName.VAR, identifier, offsetXpath ); List<XarrayRef> arrayRefs = new ArrayList<>(); try { XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression xpathExpr = xpath.compile(xpathQuery); NodeList output = (NodeList) xpathExpr.evaluate(from.getBaseElement(), XPathConstants.NODESET); for (int i = 0; i < output.getLength(); i++) { Element arrayRef = (Element) output.item(i); arrayRefs.add(new XarrayRef(arrayRef)); } } catch (XPathExpressionException ignored) { String t = ignored.getMessage(); } return arrayRefs; } /** * Find all real constants in the direct children of the given parent. * @param parent Root element to search from. * @return A list of all found real constants. */ public static List<XrealConstant> getRealConstants(XbaseElement parent){ List<XrealConstant> elements = new ArrayList<>(); Node n = parent.getBaseElement().getFirstChild(); while(n != null){ if (n.getNodeType() == Node.ELEMENT_NODE) { Element el = (Element) n; if(el.getTagName().equals(XelementName.F_REAL_CONST)) { XrealConstant ref = new XrealConstant(el); elements.add(ref); } } n = n.getNextSibling(); } return elements; } /** * Insert a function call at the end of a do statement. * @param loop The do statement to insert in. * @param call The function call to be inserted. */ public static void insertFctCallIntoLoop(XdoStatement loop, XfunctionCall call){ loop.getBody().getBaseElement().appendChild(call.getBaseElement().getParentNode()); } /** * Find function definition in the ancestor. * @param child The child element to search from. * @return A XfunctionDefinition object if found. Null otherwise. 
*/ public static XfunctionDefinition findParentFctDef(XbaseElement child){ return findParentOfType(child, XfunctionDefinition.class); } /** * Find do statment in which the child is included if any. * @param child The child element to search from. * @return A XdoStatement object if found. Null otherwise. */ public static XdoStatement findParentDoStmt(XbaseElement child){ return findParentOfType(child, XdoStatement.class); } /** * Find a pragma element in the previous nodes containing a given keyword. * @param from Element to start from. * @param keyword Keyword to be found in the pragma. * @return The pragma if found. Null otherwise. */ public static Xpragma findPreviousPragma(XbaseElement from, String keyword){ if(from == null || from.getBaseElement() == null){ return null; } Node prev = from.getBaseElement().getPreviousSibling(); Node parent = from.getBaseElement(); do { while (prev != null) { if (prev.getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) prev; if (element.getTagName().equals(XelementName.PRAGMA_STMT) && element.getTextContent().toLowerCase(). contains(keyword.toLowerCase())) { return new Xpragma(element); } } prev = prev.getPreviousSibling(); } parent = parent.getParentNode(); prev = parent; } while(parent != null); return null; } /** * Find do statement element. * @param fctDef Function definition to search in. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XdoStatement object if found. Null otherwise. */ public static XdoStatement findDoStatement(XfunctionDefinition fctDef, boolean any){ Xbody body = fctDef.getBody(); return XelementHelper.findDoStatement(body, any); } /** * Find var element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xvar object if found. Null otherwise. */ public static Xvar findVar(XbaseElement parent, boolean any){ return findXelement(parent, any, Xvar.class); } /** * Find varRef element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XvarRef object if found. Null otherwise. */ public static XvarRef findVarRef(XbaseElement parent, boolean any){ return findXelement(parent, any, XvarRef.class); } /** * Find indexRange element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XindexRange object if found. Null otherwise. */ public static XindexRange findIndexRange(XbaseElement parent, boolean any){ return findXelement(parent, any, XindexRange.class); } /** * Find arrayIndex element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XarrayIndex object if found. Null otherwise. */ public static XarrayIndex findArrayIndex(XbaseElement parent, boolean any){ return findXelement(parent, any, XarrayIndex.class); } /** * Find name element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xname object if found. Null otherwise. */ public static Xname findName(XbaseElement parent, boolean any){ return findXelement(parent, any, Xname.class); } /** * Find value element. 
* @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xvalue object if found. Null otherwise. */ public static Xvalue findValue(XbaseElement parent, boolean any){ return findXelement(parent, any, Xvalue.class); } /** * Find lValueModel element at given position. * @param parent Root element to search from. * @param position Position of the element to be found in the parent children * list. * @return A XLValueModel object if found. Null otherwise. */ public static XLValueModel findLValueModel(XbaseElement parent, int position){ Element element = getXthChildElement(parent.getBaseElement(), position); if(element == null){ return null; } switch (element.getTagName()) { case XelementName.VAR: return new XLValueModel(new Xvar(element)); case XelementName.F_ARRAY_REF: return new XLValueModel(new XarrayRef(element)); case XelementName.F_CHAR_REF: case XelementName.F_MEMBER_REF: case XelementName.F_COARRAY_REF: return null; // TODO when classes are available default: return null; } } /** * Find exprModel element at given position. * @param parent Root element to search from. * @param position Position of the element to be found in the parent children * list. * @return A XexprModel object if found. Null otherwise. */ public static XexprModel findExprModel(XbaseElement parent, int position){ /** An exprModel can be of the following type * - FintConstant, FrealConstant, FcomplexConstant, FcharacterConstant, * FlogicalConstant * TODO FarrayConstructor, FstructConstructor * - FarrayConstructor, FstructConstructor * - Var * TODO FcharacterRef, FmemberRef, FcoArrayRef * - FarrayRef, FcharacterRef, FmemberRef, FcoArrayRef, varRef * - functionCall * - plusExpr, minusExpr, mulExpr, divExpr, FpowerExpr, FconcatExpr * logEQExpr, logNEQExpr, logGEExpr, logGTExpr, logLEExpr, logLTExpr, * logAndExpr, logOrExpr, logEQVExpr, logNEQVExpr, logNotExpr, * unaryMinusExpr, userBinaryExpr, userUnaryExpr * TODO FdoLoop * - FdoLoop */ Element element = getXthChildElement(parent.getBaseElement(), position); if(element == null){ return null; } switch (element.getTagName()){ case XelementName.F_INT_CONST: return new XexprModel(new XintConstant(element)); case XelementName.F_REAL_CONST: return new XexprModel(new XrealConstant(element)); case XelementName.F_LOGICAL_CONST: return new XexprModel(new XlogicalConstant(element)); case XelementName.F_COMPLEX_CONST: return new XexprModel(new XcomplexConstant(element)); case XelementName.F_CHAR_CONST: return new XexprModel(new XcharacterConstant(element)); case XelementName.VAR: return new XexprModel(new Xvar(element)); case XelementName.FCT_CALL: return new XexprModel(new XfunctionCall(element)); case XelementName.F_ARRAY_REF: return new XexprModel(new XarrayRef(element)); case XelementName.VAR_REF: return new XexprModel(new XvarRef(element)); // binary expression case XelementName.DIV_EXPR: case XelementName.F_CONCAT_EXPR: case XelementName.F_POWER_EXPR: case XelementName.LOG_AND_EXPR: case XelementName.LOG_EQ_EXPR: case XelementName.LOG_EQV_EXPR: case XelementName.LOG_GE_EXPR: case XelementName.LOG_GT_EXPR: case XelementName.LOG_LE_EXPR: case XelementName.LOG_LT_EXPR: case XelementName.LOG_NEQ_EXPR: case XelementName.LOG_NEWV_EXPR: case XelementName.LOG_OR_EXPR: case XelementName.MINUS_EXPR: case XelementName.MUL_EXPR: case XelementName.PLUS_EXPR: case XelementName.USER_BINARY_EXPR: return new XexprModel(new XbinaryExpr(element)); // unary expression case 
XelementName.LOG_NOT_EXPR: case XelementName.UNARY_MINUS_EXPR: case XelementName.USER_UNARY_EXPR: return new XexprModel(new XunaryExpr(element)); default: return null; } } /** * The inner element of a varRef is one of the following: * - Var * - FmemberRef * - FarrayRef * - FcharacterRef * - FcoArrayRef * @param parent The root element to search form. * @return The varRef inner element as a XbaseElement derived type. */ public static XbaseElement findVarRefInnerElement(XbaseElement parent){ Element element = getFirstChildElement(parent.getBaseElement()); if(element == null){ return null; } switch (element.getTagName()) { case XelementName.VAR: return new Xvar(element); case XelementName.F_MEMBER_REF: return null; // TODO move to XmemberRef case XelementName.F_ARRAY_REF: return new XarrayRef(element); case XelementName.F_CHAR_REF: return null; // TODO move to XcharacterRef case XelementName.F_COARRAY_REF: return null; // TODO move to XcoArrayRef default: return null; } } /** * Find constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xconstant object if found. Null otherwise. */ public static Xconstant findConstant(XbaseElement parent, boolean any){ // FintConstant, FrealConstant, FcomplexConstant, FcharacterConstant, // FlogicalConstant XintConstant intConst = findIntConstant(parent, any); if(intConst != null){ return intConst; } XrealConstant realConst = findRealConstant(parent, any); if(realConst != null){ return realConst; } XcomplexConstant complexConst = findComplexConstant(parent, any); if(complexConst != null){ return complexConst; } XcharacterConstant charConst = findCharacterConstant(parent, any); if(charConst != null){ return charConst; } XlogicalConstant logConst = findLogicalConstant(parent, any); if(logConst != null){ return logConst; } return null; } /** * Find integer constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XintConstant object if found. Null otherwise. */ public static XintConstant findIntConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XintConstant.class); } /** * Find real constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XrealConstant object if found. Null otherwise. */ public static XrealConstant findRealConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XrealConstant.class); } /** * Find complex constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XcomplexConstant object if found. Null otherwise. */ public static XcomplexConstant findComplexConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XcomplexConstant.class); } /** * Find character constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XcharacterConstant object if found. Null otherwise. 
*/ public static XcharacterConstant findCharacterConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XcharacterConstant.class); } /** * Find logical constant element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XlogicalConstant object if found. Null otherwise. */ public static XlogicalConstant findLogicalConstant(XbaseElement parent, boolean any){ return findXelement(parent, any, XlogicalConstant.class); } /** * Find condition element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xcondition object if found. Null otherwise. */ public static Xcondition findCondition(XbaseElement parent, boolean any){ return findXelement(parent, any, Xcondition.class); } /** * Find then element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xthen object if found. Null otherwise. */ public static Xthen findThen(XbaseElement parent, boolean any){ return findXelement(parent, any, Xthen.class); } /** * Find else element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xelse object if found. Null otherwise. */ public static Xelse findElse(XbaseElement parent, boolean any){ return findXelement(parent, any, Xelse.class); } /** * Find arguments element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XargumentsTable object if found. Null otherwise. */ public static XargumentsTable findArgumentsTable(XbaseElement parent, boolean any){ return findXelement(parent, any, XargumentsTable.class); } /** * Find lowerBound element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XlowerBound object if found. Null otherwise. */ public static XlowerBound findLowerBound(XbaseElement parent, boolean any){ return findXelement(parent, any, XlowerBound.class); } /** * Find upperBound element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A XupperBound object if found. Null otherwise. */ public static XupperBound findUpperBound(XbaseElement parent, boolean any){ return findXelement(parent, any, XupperBound.class); } /** * Find step element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xstep object if found. Null otherwise. */ public static Xstep findStep(XbaseElement parent, boolean any){ return findXelement(parent, any, Xstep.class); } /** * Find body element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are search for. * @return A Xbody object if found. Null otherwise. */ public static Xbody findBody(XbaseElement parent, boolean any){ return findXelement(parent, any, Xbody.class); } /** * Find do statement element. 
* @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XdoStatement object if found. Null otherwise. */ public static XdoStatement findDoStatement(XbaseElement parent, boolean any){ return findXelement(parent, any, XdoStatement.class); } /** * Find the direct next do statement element. * @param from The element to search from. Direct next sibling is searched. * @return An XdoStatement object if it directly follows the given from * element. Null otherwise. */ public static XdoStatement findNextDoStatement(XbaseElement from){ return findNextElementOfType(from, XdoStatement.class); } /** * Find the direct next do statement element. * @param from The element to search from. Direct next sibling is searched. * @param until The element to search until. * @return An XdoStatement object if it directly follows the given from * element. Null otherwise. */ public static XdoStatement findNextDoStatement(XbaseElement from, XbaseElement until) { return findNextElementOfType(from, until, XdoStatement.class); } /** * Find symbols element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XsymbolTable object if found. Null otherwise. */ public static XsymbolTable findSymbols(XbaseElement parent, boolean any){ return findXelement(parent, any, XsymbolTable.class); } /** * Find declarations element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XdeclTable object if found. Null otherwise. */ public static XdeclTable findDeclarations(XbaseElement parent, boolean any){ return findXelement(parent, any, XdeclTable.class); } /** * Find type table element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XtypeTable object if found. Null otherwise. */ public static XtypeTable findTypeTable(XcodeProgram parent, boolean any){ return findXelement(parent, any, XtypeTable.class); } /** * Find global symbols element in the XcodeML representation. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XglobalSymbolTable object if found. Null otherwise. */ public static XglobalSymbolTable findGlobalSymbols(XcodeProgram parent, boolean any) { return findXelement(parent, any, XglobalSymbolTable.class); } /** * Find global declarations element in the XcodeML representation. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XglobalDeclTable object if found. Null otherwise. */ public static XglobalDeclTable findGlobalDeclarations(XcodeProgram parent, boolean any) { return findXelement(parent, any, XglobalDeclTable.class); } /** * Find params in the XcodeML representation. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An Xparams object if found. Null otherwise.
*/ public static Xparams findParams(XbaseElement parent, boolean any){ return findXelement(parent, any, Xparams.class); } /** * Find number of index ranges in an element. * @param parent Root element to search from. * @return The number of index ranges found. */ public static int findNumberOfRange(XbaseElement parent){ int indexCounter = 0; Node node = parent.getBaseElement().getFirstChild(); while(node != null){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; if(element.getTagName().equals(XelementName.INDEX_RANGE)){ ++indexCounter; } } // move to the next sibling so the loop terminates node = node.getNextSibling(); } return indexCounter; } /** * Find all the index elements (arrayIndex and indexRange) in an element. * @param parent Root element to search from. * @return A list of all index elements found. */ public static List<Xindex> findIndexes(XbaseElement parent){ List<Xindex> indexRanges = new ArrayList<>(); if(parent == null || parent.getBaseElement() == null){ return indexRanges; } Node node = parent.getBaseElement().getFirstChild(); while (node != null){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; switch (element.getTagName()){ case XelementName.ARRAY_INDEX: indexRanges.add(new XarrayIndex(element)); break; case XelementName.INDEX_RANGE: indexRanges.add(new XindexRange(element)); break; } } node = node.getNextSibling(); } return indexRanges; } /** * Find all the name elements in an element. * @param parent Root element to search from. * @return A list of all name elements found. */ public static List<Xname> findAllNames(XbaseElement parent){ List<Xname> names = new ArrayList<>(); if(parent == null || parent.getBaseElement() == null){ return names; } Node node = parent.getBaseElement().getFirstChild(); while (node != null){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; if(element.getTagName().equals(XelementName.NAME)) { names.add(new Xname(element)); } } node = node.getNextSibling(); } return names; } /** * Find len element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An Xlength object if found. Null otherwise. */ public static Xlength findLen(XbaseElement parent, boolean any){ return findXelement(parent, any, Xlength.class); } /** * Find kind element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An Xkind object if found. Null otherwise. */ public static Xkind findKind(XbaseElement parent, boolean any){ return findXelement(parent, any, Xkind.class); } /** * Find function call element. * @param parent Root element to search from. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return An XfunctionCall object if found. Null otherwise. */ public static XfunctionCall findFctCall(XbaseElement parent, boolean any){ return findXelement(parent, any, XfunctionCall.class); } /** * Find a function call element nested in the given expression statement. * @param exprStmt The expression statement to search from. * @return A function call element if found. Null otherwise.
*/ public static XfunctionCall findFctCall(XexprStatement exprStmt){ if(exprStmt == null){ return null; } NodeList nodeList = exprStmt.getBaseElement().getChildNodes(); for (int i = 0; i < nodeList.getLength(); i++) { Node nextNode = nodeList.item(i); if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(XelementName.FCT_CALL)){ return new XfunctionCall(element); } } } return null; } /** * Find if there is an expr statement directly after the given element. * @param from The element to search from. * @return An expr statement element if found. Null otherwise. */ public static XexprStatement findNextExprStatement(XbaseElement from){ return findNextElementOfType(from, XexprStatement.class); } /** * Find if there is a do statement directly after the given element. * @param from The element to search from. * @return A do statement element if found. Null otherwise. */ public static XdoStatement findDirectNextDoStmt(XbaseElement from){ return findDirectNextElement(from, XdoStatement.class); } /** * Find if there is an if statement directly after the given element. * @param from The element to search from. * @return An if statement element if found. Null otherwise. */ public static XifStatement findDirectNextIfStmt(XbaseElement from){ return findDirectNextElement(from, XifStatement.class); } /** * Find if there is an assign statement directly after the given element. * @param from The element to search from. * @return An assign statement element if found. Null otherwise. */ public static XassignStatement findDirectNextAssignStmt(XbaseElement from){ return findDirectNextElement(from, XassignStatement.class); } /** * Delete all the elements between the two given pragmas. * @param start The start pragma. Deletion starts from the next element. * @param end The end pragma. Deletion ends just before this element. */ public static void deleteBetween(Xpragma start, Xpragma end){ ArrayList<Element> toDelete = new ArrayList<>(); Node node = start.getBaseElement().getNextSibling(); while (node != null && node != end.getBaseElement()){ if(node.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element)node; toDelete.add(element); } node = node.getNextSibling(); } for(Element e : toDelete){ delete(e); } } /** * Find all the pragma elements in an XcodeML tree. * @param xcodeml The XcodeML program to search in. * @return A list of all pragmas found in the XcodeML program. */ public static List<Xpragma> findAllPragmas(XcodeProgram xcodeml){ NodeList pragmaList = xcodeml.getDocument() .getElementsByTagName(XelementName.PRAGMA_STMT); List<Xpragma> pragmas = new ArrayList<>(); for (int i = 0; i < pragmaList.getLength(); i++) { Node pragmaNode = pragmaList.item(i); if (pragmaNode.getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) pragmaNode; pragmas.add(new Xpragma(element)); } } return pragmas; } /** * Check if the two elements are direct children of the same parent element. * @param e1 First element. * @param e2 Second element. * @return True if the two elements are direct children of the same parent. * False otherwise.
*/ public static boolean hasSameParentBlock(XbaseElement e1, XbaseElement e2){ if(e1 == null || e2 == null || e1.getBaseElement() == null || e2.getBaseElement() == null) { return false; } return e1.getBaseElement().getParentNode() == e2.getBaseElement().getParentNode(); } /** * Insert all the statements from a given body at the end of another body. * @param originalBody The body to which the extra body will be appended. * @param extraBody The body that will be appended to the original body. * @throws IllegalTransformationException if one of the bodies or their base * element is null. */ public static void appendBody(Xbody originalBody, Xbody extraBody) throws IllegalTransformationException { if(originalBody == null || originalBody.getBaseElement() == null || extraBody == null || extraBody.getBaseElement() == null) { throw new IllegalTransformationException("One of the bodies is null."); } // Append the content of the extra body to the original body Node childNode = extraBody.getBaseElement().getFirstChild(); while(childNode != null){ // Keep a handle on the next sibling before moving the node Node nextChild = childNode.getNextSibling(); if(childNode.getNodeType() == Node.ELEMENT_NODE){ originalBody.getBaseElement().appendChild(childNode); } childNode = nextChild; } } /** * Extract the body of a do statement and place it directly after it. * @param loop The do statement containing the body to be extracted. */ public static void extractBody(XdoStatement loop){ Element loopElement = loop.getBaseElement(); Element body = XelementHelper.findFirstElement(loopElement, XelementName.BODY); Node refNode = loopElement; if(body == null){ return; } for(Node childNode = body.getFirstChild(); childNode!=null;){ // Keep a handle on the next sibling before moving the node Node nextChild = childNode.getNextSibling(); if(childNode.getNodeType() == Node.ELEMENT_NODE){ XelementHelper.insertAfter(refNode, childNode); refNode = childNode; } childNode = nextChild; } } /** * Delete an element from the tree. * @param element Element to be deleted. */ public static void delete(Element element){ if(element == null || element.getParentNode() == null){ return; } element.getParentNode().removeChild(element); } /** * Write the XcodeML to file or std out. * @param xcodeml The XcodeML to write in the output. * @param outputFile Path of the output file or null to output on std out. * @param indent Number of spaces used for the indentation. * @return True if the output could be written without problems. False otherwise. */ public static boolean writeXcodeML(XcodeProgram xcodeml, String outputFile, int indent) { try { XelementHelper.cleanEmptyTextNodes(xcodeml.getDocument()); Transformer transformer = TransformerFactory.newInstance().newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.setOutputProperty( "{http://xml.apache.org/xslt}indent-amount", Integer.toString(indent)); DOMSource source = new DOMSource(xcodeml.getDocument()); if(outputFile == null){ // Output to console StreamResult console = new StreamResult(System.out); transformer.transform(source, console); } else { // Output to file StreamResult file = new StreamResult(new File(outputFile)); transformer.transform(source, file); } } catch (TransformerException ex){ // also covers TransformerConfigurationException xcodeml.addError("Cannot output file: " + ex.getMessage(), 0); return false; } return true; } /** * Removes text nodes that only contain whitespace.
The conditions for * removing text nodes, besides only containing whitespace, are: If the * parent node has at least one child of any of the following types, all * whitespace-only text-node children will be removed: - ELEMENT child - * CDATA child - COMMENT child. * @param parentNode Root node to start the cleaning. */ public static void cleanEmptyTextNodes(Node parentNode) { boolean removeEmptyTextNodes = false; Node childNode = parentNode.getFirstChild(); while (childNode != null) { removeEmptyTextNodes |= checkNodeTypes(childNode); childNode = childNode.getNextSibling(); } if (removeEmptyTextNodes) { removeEmptyTextNodes(parentNode); } } /** * Validate a string attribute. * @param doc Document in which the attribute must be validated. * @param attrValue Attribute value expected. * @param xpathQuery Xpath query to locate the attribute value. * @return True if the attribute validates. False otherwise. * @throws Exception if xpathQuery cannot be executed. */ public static boolean validateStringAttribute(Document doc, String attrValue , String xpathQuery) throws Exception { XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); XPathExpression getVersion = xpath.compile(xpathQuery); String outputValue = (String) getVersion.evaluate(doc, XPathConstants.STRING); return outputValue.equals(attrValue); } /** * Insert an element just before a reference element. * @param ref The reference element. * @param insert The element to be inserted. */ public static void insertBefore(XbaseElement ref, XbaseElement insert){ ref.getBaseElement().getParentNode().insertBefore(insert.getBaseElement(), ref.getBaseElement()); } /** * Insert an element just after a reference element. * @param refElement The reference element. * @param element The element to be inserted. */ public static void insertAfter(XbaseElement refElement, XbaseElement element){ XelementHelper.insertAfter(refElement.getBaseElement(), element.getBaseElement()); } /* * PRIVATE SECTION */ /** * Remove all empty text nodes in the subtree. * @param parentNode Root node to start the search. */ private static void removeEmptyTextNodes(Node parentNode) { Node childNode = parentNode.getFirstChild(); while (childNode != null) { // grab the "nextSibling" before the child node is removed Node nextChild = childNode.getNextSibling(); short nodeType = childNode.getNodeType(); if (nodeType == Node.TEXT_NODE) { boolean containsOnlyWhitespace = childNode.getNodeValue() .trim().isEmpty(); if (containsOnlyWhitespace) { parentNode.removeChild(childNode); } } childNode = nextChild; } } /** * Check the type of the given node. * @param childNode Node to be checked. * @return True if the node contains data. False otherwise. */ private static boolean checkNodeTypes(Node childNode) { short nodeType = childNode.getNodeType(); if (nodeType == Node.ELEMENT_NODE) { cleanEmptyTextNodes(childNode); // recurse into subtree } return nodeType == Node.ELEMENT_NODE || nodeType == Node.CDATA_SECTION_NODE || nodeType == Node.COMMENT_NODE; } /** * Insert a node directly after a reference node. * @param refNode The reference node. New node will be inserted after this * one. * @param newNode The new node to be inserted. */ private static void insertAfter(Node refNode, Node newNode){ refNode.getParentNode().insertBefore(newNode, refNode.getNextSibling()); } /** * Find an element of Class T in the nested elements under parent. * @param parent XbaseElement to search from. * @param any If true, find in any nested element under parent. 
If * false, only direct children are searched. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return An instance of T class if an element is found. Null if no element * is found. */ private static <T extends XbaseElement> T findXelement(XbaseElement parent, boolean any, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || parent == null || parent.getBaseElement() == null) { return null; } Element element = findElement(parent, elementName, any); if (element != null){ try{ return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } return null; } /** * Find any element of the given Class among the following siblings of the * from element. Only first-level siblings are searched. * @param from XbaseElement to search from. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return The first element found after the from element. Null if no element * is found. */ private static <T extends XbaseElement> T findNextElementOfType( XbaseElement from, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || from.getBaseElement() == null){ return null; } Node nextNode = from.getBaseElement().getNextSibling(); while (nextNode != null){ if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(elementName)){ try { return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } } nextNode = nextNode.getNextSibling(); } return null; } /** * Find any element of the given Class among the following siblings of the * from element until the end element is reached. * Only first-level siblings are searched. * @param from XbaseElement to search from. * @param until XbaseElement to search until. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return The first element found after the from element. Null if no element * is found. */ private static <T extends XbaseElement> T findNextElementOfType( XbaseElement from, XbaseElement until, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || until == null || from.getBaseElement() == null) { return null; } Node nextNode = from.getBaseElement().getNextSibling(); while (nextNode != null){ if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element == until.getBaseElement()){ // End element is reached return null; } if(element.getTagName().equals(elementName)){ try { return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } } nextNode = nextNode.getNextSibling(); } return null; } /** * Find element of the given Class that is directly after the given from * element. * @param from XbaseElement to search from. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return Instance of the xElementClass. Null if no element is found.
*/ private static <T extends XbaseElement> T findDirectNextElement( XbaseElement from, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || from.getBaseElement() == null){ return null; } Node nextNode = from.getBaseElement().getNextSibling(); while (nextNode != null){ if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(elementName)){ try{ return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } return null; } nextNode = nextNode.getNextSibling(); } return null; } /** * Find a parent element of the given class in the ancestors of a child. * @param from The child element to search from. * @param xElementClass Element's class to be found. * @param <T> Derived class of XbaseElement. * @return An instance of the given class if found. Null otherwise. */ private static <T extends XbaseElement> T findParentOfType( XbaseElement from, Class<T> xElementClass) { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName == null || from == null || from.getBaseElement() == null){ return null; } Node parent = from.getBaseElement().getParentNode(); // walk up the ancestor chain until the document root is reached while(parent != null){ if (parent.getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) parent; if(element.getTagName().equals(elementName)){ try{ return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ return null; } } } parent = parent.getParentNode(); } return null; } /** * Find the first element with tag corresponding to elementName. * @param parent The root element to search from. * @param elementName The tag of the element to search for. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return The first element found. Null if no element is found. */ private static Element findElement(XbaseElement parent, String elementName, boolean any){ return findElement(parent.getBaseElement(), elementName, any); } /** * Find the first element with tag corresponding to elementName. * @param parent The root element to search from. * @param elementName The tag of the element to search for. * @param any If true, find in any nested element under parent. If * false, only direct children are searched. * @return The first element found. Null if no element is found. */ private static Element findElement(Element parent, String elementName, boolean any){ if(any){ return findFirstElement(parent, elementName); } else { return findFirstChildElement(parent, elementName); } } private static Element findFirstElement(XbaseElement parent, String elementName){ return findFirstElement(parent.getBaseElement(), elementName); } /** * Find the first element with tag corresponding to elementName nested under * the parent element. * @param parent The root element to search from. * @param elementName The tag of the element to search for. * @return The first element found under parent with the corresponding tag. * Null if no element is found. */ private static Element findFirstElement(Element parent, String elementName){ NodeList elements = parent.getElementsByTagName(elementName); if(elements.getLength() == 0){ return null; } return (Element) elements.item(0); } /** * Find the first element with tag corresponding to elementName in the direct * children of the parent element. * @param parent The root element to search from. * @param elementName The tag of the element to search for.
* @return The first element found in the direct children of the element * parent with the corresponding tag. Null if no element is found. */ private static Element findFirstChildElement(Element parent, String elementName){ NodeList nodeList = parent.getChildNodes(); for (int i = 0; i < nodeList.getLength(); i++) { Node nextNode = nodeList.item(i); if(nextNode.getNodeType() == Node.ELEMENT_NODE){ Element element = (Element) nextNode; if(element.getTagName().equals(elementName)){ return element; } } } return null; } /** * Get the first child element. * @param parent Root element to search from. * @return First found element. */ private static Element getFirstChildElement(Element parent){ NodeList nodeList = parent.getChildNodes(); for (int i = 0; i < nodeList.getLength(); i++) { Node nextNode = nodeList.item(i); if (nextNode.getNodeType() == Node.ELEMENT_NODE) { return (Element) nextNode; } } return null; } /** * Get the xth child element. * @param parent Root element to search from. * @param position Position of the element to be found. Starts at 0. * @return Element found at position. */ private static Element getXthChildElement(Element parent, int position){ int crtIndex = 0; NodeList nodeList = parent.getChildNodes(); for (int i = 0; i < nodeList.getLength(); i++) { Node nextNode = nodeList.item(i); if (nextNode.getNodeType() == Node.ELEMENT_NODE && crtIndex == position) { return (Element) nextNode; } else if (nextNode.getNodeType() == Node.ELEMENT_NODE){ ++crtIndex; } } return null; } /** * Create an empty XbinaryExpr object with the given tag. * @param exprTag The tag associated with the specialized binary expression. * @param xcodeml The current XcodeML program. * @return A new XbinaryExpr object. * @throws IllegalTransformationException If the tag is not associated with * any binary expression. */ public static XbinaryExpr createEmpty(String exprTag, XcodeProgram xcodeml) throws IllegalTransformationException { if(XelementName.isBinaryExprTag(exprTag)){ Element element = xcodeml.getDocument().createElement(exprTag); return new XbinaryExpr(element); } throw new IllegalTransformationException("No binary expression with tag: " + exprTag); } /** * Create an empty element of the given class in the given program. * @param xElementClass The class of the element to be created. * @param xcodeml The current XcodeML program. * @param <T> Type of the class to be created. * @return An empty element of the given class. * @throws IllegalTransformationException if the element cannot be created. */ public static <T extends XbaseElement> T createEmpty(Class<T> xElementClass, XcodeProgram xcodeml) throws IllegalTransformationException { String elementName = XelementName.getElementNameFromClass(xElementClass); if(elementName != null){ Element element = xcodeml.getDocument().createElement(elementName); try { return xElementClass. getDeclaredConstructor(Element.class).newInstance(element); } catch(Exception ex){ throw new IllegalTransformationException("Cannot create new statement: " + elementName); } } throw new IllegalTransformationException("Undefined element for class: " + xElementClass.toString()); } /** * Constructs a new empty XcodeML program. * @return A new XcodeProgram object with only the root element.
*/ public static XcodeProgram createNewProgram(){ try { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); Document doc = docBuilder.newDocument(); // Create the root element and attach it to the document doc.appendChild(doc.createElement(XelementName.X_CODE_PROGRAM)); return new XcodeProgram(doc); } catch (ParserConfigurationException ex){ return null; } } /** * Create a do statement with an empty body in the given program. * @param xcodeml The current XcodeProgram in which the statement is created. * @param range The iteration range to be applied to the do statement. * @return A new XdoStatement object with an empty body. */ public static XdoStatement createWithEmptyBody(XcodeProgram xcodeml, XloopIterationRange range) { Element element = xcodeml.getDocument().createElement(XelementName.DO_STMT); if(range != null){ element.appendChild(range.getInductionVar().cloneNode()); element.appendChild(range.getIndexRange().cloneNode()); } Element body = xcodeml.getDocument().createElement(XelementName.BODY); element.appendChild(body); return new XdoStatement(element); } /** * Get the depth of an element in the AST. * @param element The element to start from. * @return A depth value >= 0. */ public static int getDepth(Element element) { Node parent = element.getParentNode(); int depth = 0; while(parent != null && parent.getNodeType() == Node.ELEMENT_NODE) { ++depth; parent = parent.getParentNode(); } return depth; } }
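For orientation, a minimal usage sketch of the helper methods defined above, assuming an XcodeProgram has already been parsed elsewhere; the pragma variable and the loop-merging scenario are hypothetical illustrations, not part of the original file:

// Hedged sketch: merge the bodies of two adjacent do statements that
// follow a pragma, using only helpers shown above.
XdoStatement first = XelementHelper.findDirectNextDoStmt(pragma); // pragma: a hypothetical Xpragma
if(first != null){
  XdoStatement second = XelementHelper.findNextDoStatement(first);
  if(second != null && XelementHelper.hasSameParentBlock(first, second)){
    try {
      XelementHelper.appendBody(XelementHelper.findBody(first, false),
          XelementHelper.findBody(second, false));
      XelementHelper.delete(second.getBaseElement()); // drop the now-empty loop
    } catch(IllegalTransformationException itex){
      // one of the bodies was null; leave both loops untouched
    }
  }
}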
Add method to retrieve all assign stmt with array refs in block
omni-cx2x/src/cx2x/xcodeml/helper/XelementHelper.java
Add method to retrieve all assign stmt with array refs in block
Java
bsd-3-clause
968b95c595fc9819fe9db99282a0fc816d9aed2c
0
appcelerator/jaxen_titanium,ukcrpb6/jaxen-wmb-extensions
/* $Id$ Copyright 2003 (C) The Werken Company. All Rights Reserved. Redistribution and use of this software and associated documentation ("Software"), with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain copyright statements and notices. Redistributions must also contain a copy of this document. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name "jaxen" must not be used to endorse or promote products derived from this Software without prior written permission of The Werken Company. For written permission, please contact [email protected]. 4. Products derived from this Software may not be called "jaxen" nor may "jaxen" appear in their names without prior written permission of The Werken Company. "jaxen" is a registered trademark of The Werken Company. 5. Due credit should be given to The Werken Company. (http://jaxen.werken.com/). THIS SOFTWARE IS PROVIDED BY THE WERKEN COMPANY AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE WERKEN COMPANY OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.jaxen.expr; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import org.jaxen.Context; import org.jaxen.ContextSupport; import org.jaxen.JaxenException; import org.jaxen.UnresolvableException; import org.jaxen.Navigator; import org.jaxen.expr.iter.IterableAxis; import org.jaxen.saxpath.Axis; import org.jaxen.util.IdentityHashMap; /** * Expression object that represents any flavor * of name-test steps within an XPath. * <p> * This includes simple steps, such as "foo", * non-default-axis steps, such as "following-sibling::foo" * or "@foo", and namespace-aware steps, such * as "foo:bar". * * @author bob mcwhirter ([email protected]) * @author Stephen Colebourne */ public class DefaultNameStep extends DefaultStep implements NameStep { /** Dummy object used to convert HashMap to HashSet */ private final static Object PRESENT = new Object(); /** * Our prefix, bound through the current Context. * The empty-string ("") if no prefix was specified. * Decidedly NOT-NULL, due to SAXPath constraints. * This is the 'foo' in 'foo:bar'. */ private String prefix; /** * Our local-name. * This is the 'bar' in 'foo:bar'. */ private String localName; /** Quick flag denoting if the local name was '*' */ private boolean matchesAnyName; /** Quick flag denoting if we have a namespace prefix **/ private boolean hasPrefix; /** * Constructor. 
* * @param axis the axis to work through * @param prefix the name prefix * @param localName the local name * @param predicateSet the set of predicates */ public DefaultNameStep(IterableAxis axis, String prefix, String localName, PredicateSet predicateSet) { super(axis, predicateSet); this.prefix = prefix; this.localName = localName; this.matchesAnyName = "*".equals(localName); this.hasPrefix = (this.prefix != null && this.prefix.length() > 0); } /** * Gets the namespace prefix. * * @return the prefix */ public String getPrefix() { return this.prefix; } /** * Gets the local name. * * @return the local name */ public String getLocalName() { return this.localName; } /** * Does this step match any name (xpath of '*'). * * @return true if it matches any name */ public boolean isMatchesAnyName() { return matchesAnyName; } /** * Gets the step as a fully defined xpath. * * @return the full xpath for this step */ public String getText() { StringBuffer buf = new StringBuffer(64); buf.append(getAxisName()).append("::"); if (getPrefix() != null && getPrefix().length() > 0) { buf.append(getPrefix()).append(':'); } return buf.append(getLocalName()).append(super.getText()).toString(); } /** * Evaluate the context node set to find the new node set. * <p> * This method overrides the version in DefaultStep for performance. */ public List evaluate(Context context) throws JaxenException { List contextNodeSet = context.getNodeSet(); int contextSize = contextNodeSet.size(); // optimize for context size 0 if (contextSize == 0) { return Collections.EMPTY_LIST; } ContextSupport support = context.getContextSupport(); boolean namedAccess = (!matchesAnyName && getIterableAxis().supportsNamedAccess(support)); // optimize for context size 1 (common case, avoids lots of object creation) if (contextSize == 1) { Object contextNode = contextNodeSet.get(0); if (namedAccess) { // get the iterator over the nodes and check it String uri = support.translateNamespacePrefixToUri(prefix); Iterator axisNodeIter = getIterableAxis().namedAccessIterator( contextNode, support, localName, prefix, uri); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { return Collections.EMPTY_LIST; } // convert iterator to list for predicate test // no need to filter as named access guarantees this List newNodeSet = new ArrayList(); while (axisNodeIter.hasNext()) { newNodeSet.add(axisNodeIter.next()); } // evaluate the predicates return getPredicateSet().evaluatePredicates(newNodeSet, support); } else { // get the iterator over the nodes and check it Iterator axisNodeIter = axisIterator(contextNode, support); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { return Collections.EMPTY_LIST; } // run through iterator, filtering using matches() // adding to list for predicate test List newNodeSet = new ArrayList(); while (axisNodeIter.hasNext()) { Object eachAxisNode = axisNodeIter.next(); if (matches(eachAxisNode, support)) { newNodeSet.add(eachAxisNode); } } // evaluate the predicates return getPredicateSet().evaluatePredicates(newNodeSet, support); } } // full case Map unique = new IdentityHashMap(); List interimSet = new ArrayList(contextSize); List newNodeSet = new ArrayList(contextSize); if (namedAccess) { String uri = support.translateNamespacePrefixToUri(prefix); for (int i = 0; i < contextSize; ++i) { Object eachContextNode = contextNodeSet.get(i); Iterator axisNodeIter = getIterableAxis().namedAccessIterator( eachContextNode, support, localName, prefix, uri); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { 
continue; } // ensure only one of each node in the result while (axisNodeIter.hasNext()) { Object eachAxisNode = axisNodeIter.next(); if (unique.put(eachAxisNode, PRESENT) == null) { interimSet.add(eachAxisNode); } } // evaluate the predicates newNodeSet.addAll(getPredicateSet().evaluatePredicates(interimSet, support)); interimSet.clear(); } } else { for (int i = 0; i < contextSize; ++i) { Object eachContextNode = contextNodeSet.get(i); Iterator axisNodeIter = axisIterator(eachContextNode, support); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { continue; } // ensure only unique matching nodes in the result while (axisNodeIter.hasNext()) { Object eachAxisNode = axisNodeIter.next(); if (matches(eachAxisNode, support)) { if (unique.put(eachAxisNode, PRESENT) == null) { interimSet.add(eachAxisNode); } } } // evaluate the predicates newNodeSet.addAll(getPredicateSet().evaluatePredicates(interimSet, support)); interimSet.clear(); } } return newNodeSet; } /** * Checks whether the node matches this step. * * @param node the node to check * @param contextSupport the context support * @return true if matches */ public boolean matches(Object node, ContextSupport contextSupport) throws JaxenException { Navigator nav = contextSupport.getNavigator(); String myUri = null; String nodeName = null; String nodeUri = null; if (nav.isElement(node)) { nodeName = nav.getElementName(node); nodeUri = nav.getElementNamespaceUri(node); } else if (nav.isText(node)) { return false; } else if (nav.isAttribute(node)) { if (getAxis() != Axis.ATTRIBUTE) { return false; } nodeName = nav.getAttributeName(node); nodeUri = nav.getAttributeNamespaceUri(node); } else if (nav.isDocument(node)) { return false; } else if (nav.isNamespace(node)) { if (matchesAnyName && getAxis() != Axis.NAMESPACE) { // Only works for namespace::* return false; } nodeName = nav.getNamespacePrefix(node); } else { return false; } if (hasPrefix) { myUri = contextSupport.translateNamespacePrefixToUri(this.prefix); if (myUri == null) { throw new UnresolvableException("Cannot resolve namespace prefix '"+this.prefix+"'"); } } else if (matchesAnyName) { return true; } // If we map to a non-empty namespace and the node does not // or vice-versa, fail-fast. if (hasNamespace(myUri) != hasNamespace(nodeUri)) { return false; } // To fail-fast, we check the equality of // local-names first. Shorter strings compare // quicker. if (matchesAnyName || nodeName.equals(getLocalName())) { return matchesNamespaceURIs(myUri, nodeUri); } return false; } /** * Checks whether the URI represents a namespace. * * @param uri the URI to check * @return true if non-null and non-empty */ private boolean hasNamespace(String uri) { return (uri != null && uri.length() > 0); } /** * Compares two namespace URIs, handling null. * * @param uri1 the first URI * @param uri2 the second URI * @return true if equal, where null=="" */ protected boolean matchesNamespaceURIs(String uri1, String uri2) { if (uri1 == uri2) { return true; } if (uri1 == null) { return (uri2.length() == 0); } if (uri2 == null) { return (uri1.length() == 0); } return uri1.equals(uri2); } /** * Visitor pattern for the step. * * @param visitor the visitor object */ public void accept(Visitor visitor) { visitor.visit(this); } /** * Returns a full information debugging string. * * @return a debugging string */ public String toString() { return "[(DefaultNameStep): " + getPrefix() + ":" + getLocalName() + "[" + super.toString() + "]]"; } }
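As context for the step implementation above, a hedged sketch of how a prefixed name test is normally exercised through jaxen's public entry points rather than by instantiating DefaultNameStep directly; the Document argument and the urn:example URI are assumptions for illustration:

import java.util.List;
import org.jaxen.JaxenException;
import org.jaxen.dom.DOMXPath;
import org.w3c.dom.Document;

public class NameStepSketch {
    // A prefixed step such as foo:bar only matches once its prefix is bound
    // to a URI; an unbound prefix makes matches() throw UnresolvableException.
    public static List selectFooBar(Document doc) throws JaxenException {
        DOMXPath xpath = new DOMXPath("//foo:bar");
        xpath.addNamespace("foo", "urn:example"); // binds the step's prefix to a URI
        return xpath.selectNodes(doc);            // candidates filtered via matches()
    }
}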
src/java/main/org/jaxen/expr/DefaultNameStep.java
/* $Id$ Copyright 2003 (C) The Werken Company. All Rights Reserved. Redistribution and use of this software and associated documentation ("Software"), with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain copyright statements and notices. Redistributions must also contain a copy of this document. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name "jaxen" must not be used to endorse or promote products derived from this Software without prior written permission of The Werken Company. For written permission, please contact [email protected]. 4. Products derived from this Software may not be called "jaxen" nor may "jaxen" appear in their names without prior written permission of The Werken Company. "jaxen" is a registered trademark of The Werken Company. 5. Due credit should be given to The Werken Company. (http://jaxen.werken.com/). THIS SOFTWARE IS PROVIDED BY THE WERKEN COMPANY AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE WERKEN COMPANY OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.jaxen.expr; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import org.jaxen.Context; import org.jaxen.ContextSupport; import org.jaxen.JaxenException; import org.jaxen.UnresolvableException; import org.jaxen.Navigator; import org.jaxen.expr.iter.IterableAxis; import org.jaxen.saxpath.Axis; import org.jaxen.util.IdentityHashMap; /** * Expression object that represents any flavor * of name-test steps within an XPath. * <p> * This includes simple steps, such as "foo", * non-default-axis steps, such as "following-sibling::foo" * or "@foo", and namespace-aware steps, such * as "foo:bar". * * @author bob mcwhirter ([email protected]) * @author Stephen Colebourne */ public class DefaultNameStep extends DefaultStep implements NameStep { /** Dummy object used to convert HashMap to HashSet */ private final static Object PRESENT = new Object(); /** * Our prefix, bound through the current Context. * The empty-string ("") if no prefix was specified. * Decidedly NOT-NULL, due to SAXPath constraints. * This is the 'foo' in 'foo:bar'. */ private String prefix; /** * Our local-name. * This is the 'bar' in 'foo:bar'. */ private String localName; /** Quick flag denoting if the local name was '*' */ private boolean matchesAnyName; /** Quick flag denoting if we have a namespace prefix **/ private boolean hasPrefix; /** * Constructor. 
* * @param axis the axis to work through * @param prefix the name prefix * @param localName the local name * @param predicateSet the set of predicates */ public DefaultNameStep(IterableAxis axis, String prefix, String localName, PredicateSet predicateSet) { super(axis, predicateSet); this.prefix = prefix; this.localName = localName; this.matchesAnyName = "*".equals(localName); this.hasPrefix = (this.prefix != null && this.prefix.length() > 0); } /** * Gets the namespace prefix. * * @return the prefix */ public String getPrefix() { return this.prefix; } /** * Gets the local name. * * @return the local name */ public String getLocalName() { return this.localName; } /** * Does this step match any name (xpath of '*'). * * @return true if it matches any name */ public boolean isMatchesAnyName() { return matchesAnyName; } /** * Gets the step as a fully defined xpath. * * @return the full xpath for this step */ public String getText() { StringBuffer buf = new StringBuffer(64); buf.append(getAxisName()).append("::"); if (getPrefix() != null && getPrefix().length() > 0) { buf.append(getPrefix()).append(':'); } return buf.append(getLocalName()).append(super.getText()).toString(); } /** * Evaluate the context node set to find the new node set. * <p> * This method overrides the version in DefaultStep for performance. */ public List evaluate(Context context) throws JaxenException { List contextNodeSet = context.getNodeSet(); int contextSize = contextNodeSet.size(); // optimize for context size 0 if (contextSize == 0) { return Collections.EMPTY_LIST; } ContextSupport support = context.getContextSupport(); boolean namedAccess = (!matchesAnyName && getIterableAxis().supportsNamedAccess(support)); // optimize for context size 1 (common case, avoids lots of object creation) if (contextSize == 1) { Object contextNode = contextNodeSet.get(0); if (namedAccess) { // get the iterator over the nodes and check it String uri = support.translateNamespacePrefixToUri(prefix); Iterator axisNodeIter = getIterableAxis().namedAccessIterator( contextNode, support, localName, prefix, uri); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { return Collections.EMPTY_LIST; } // convert iterator to list for predicate test // no need to filter as named access guarantees this List newNodeSet = new ArrayList(); while (axisNodeIter.hasNext()) { newNodeSet.add(axisNodeIter.next()); } // evaluate the predicates return getPredicateSet().evaluatePredicates(newNodeSet, support); } else { // get the iterator over the nodes and check it Iterator axisNodeIter = axisIterator(contextNode, support); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { return Collections.EMPTY_LIST; } // run through iterator, filtering using matches() // adding to list for predicate test List newNodeSet = new ArrayList(); while (axisNodeIter.hasNext()) { Object eachAxisNode = axisNodeIter.next(); if (matches(eachAxisNode, support)) { newNodeSet.add(eachAxisNode); } } // evaluate the predicates return getPredicateSet().evaluatePredicates(newNodeSet, support); } } // full case Map unique = new IdentityHashMap(); List interimSet = new ArrayList(); List newNodeSet = new ArrayList(); if (namedAccess) { String uri = support.translateNamespacePrefixToUri(prefix); for (int i = 0; i < contextSize; ++i) { Object eachContextNode = contextNodeSet.get(i); Iterator axisNodeIter = getIterableAxis().namedAccessIterator( eachContextNode, support, localName, prefix, uri); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { continue; } // ensure 
only one of each node in the result while (axisNodeIter.hasNext()) { Object eachAxisNode = axisNodeIter.next(); if (unique.put(eachAxisNode, PRESENT) == null) { interimSet.add(eachAxisNode); } } // evaluate the predicates newNodeSet.addAll(getPredicateSet().evaluatePredicates(interimSet, support)); interimSet.clear(); } } else { for (int i = 0; i < contextSize; ++i) { Object eachContextNode = contextNodeSet.get(i); Iterator axisNodeIter = axisIterator(eachContextNode, support); if (axisNodeIter == null || axisNodeIter.hasNext() == false) { continue; } // ensure only unique matching nodes in the result // XXX This iterator goes in the wrong order for descendant axes // breadth-first instead of depth first; this is where the reshuffling happens while (axisNodeIter.hasNext()) { Object eachAxisNode = axisNodeIter.next(); if (matches(eachAxisNode, support)) { if (unique.put(eachAxisNode, PRESENT) == null) { interimSet.add(eachAxisNode); } } } // evaluate the predicates newNodeSet.addAll(getPredicateSet().evaluatePredicates(interimSet, support)); interimSet.clear(); } } return newNodeSet; } /** * Checks whether the node matches this step. * * @param node the node to check * @param contextSupport the context support * @return true if matches */ public boolean matches(Object node, ContextSupport contextSupport) throws JaxenException { Navigator nav = contextSupport.getNavigator(); String myUri = null; String nodeName = null; String nodeUri = null; if (nav.isElement(node)) { nodeName = nav.getElementName(node); nodeUri = nav.getElementNamespaceUri(node); } else if (nav.isText(node)) { return false; } else if (nav.isAttribute(node)) { if (getAxis() != Axis.ATTRIBUTE) { return false; } nodeName = nav.getAttributeName(node); nodeUri = nav.getAttributeNamespaceUri(node); } else if (nav.isDocument(node)) { return false; } else if (nav.isNamespace(node)) { if (matchesAnyName && getAxis() != Axis.NAMESPACE) { // Only works for namespace::* return false; } nodeName = nav.getNamespacePrefix(node); } else { return false; } if (hasPrefix) { myUri = contextSupport.translateNamespacePrefixToUri(this.prefix); if (myUri == null) { throw new UnresolvableException("Cannot resolve namespace prefix '"+this.prefix+"'"); } } else if (matchesAnyName) { return true; } // If we map to a non-empty namespace and the node does not // or vice-versa, fail-fast. if (hasNamespace(myUri) != hasNamespace(nodeUri)) { return false; } // To fail-fast, we check the equality of // local-names first. Shorter strings compare // quicker. if (matchesAnyName || nodeName.equals(getLocalName())) { return matchesNamespaceURIs(myUri, nodeUri); } return false; } /** * Checks whether the URI represents a namespace. * * @param uri the URI to check * @return true if non-null and non-empty */ private boolean hasNamespace(String uri) { return (uri != null && uri.length() > 0); } /** * Compares two namespace URIs, handling null. * * @param uri1 the first URI * @param uri2 the second URI * @return true if equal, where null=="" */ protected boolean matchesNamespaceURIs(String uri1, String uri2) { if (uri1 == uri2) { return true; } if (uri1 == null) { return (uri2.length() == 0); } if (uri2 == null) { return (uri1.length() == 0); } return uri1.equals(uri2); } /** * Visitor pattern for the step. * * @param visitor the visitor object */ public void accept(Visitor visitor) { visitor.visit(this); } /** * Returns a full information debugging string. 
* * @return a debugging string */ public String toString() { return "[(DefaultNameStep): " + getPrefix() + ":" + getLocalName() + "[" + super.toString() + "]]"; } }
Removing comment about node order; that has now been fixed; also setting initial size of ArrayLists git-svn-id: 7abf240ce0ec4644a9bf59262a41ad5796234f37@556 43379f7c-b030-0410-81db-e0b70742847c
src/java/main/org/jaxen/expr/DefaultNameStep.java
Removing comment about node order; that has now been fixed; also setting initial size of ArrayLists
Java
bsd-3-clause
6a6dec1cdb5ce842c12e6370580681b70f9c62ad
0
krishagni/openspecimen,asamgir/openspecimen,krishagni/openspecimen,asamgir/openspecimen,asamgir/openspecimen,krishagni/openspecimen
package edu.wustl.catissuecore.util; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import edu.wustl.catissuecore.bean.GenericSpecimen; import edu.wustl.catissuecore.bizlogic.StorageContainerForSpecimenBizLogic; import edu.wustl.catissuecore.domain.StorageContainer; import edu.wustl.catissuecore.util.global.AppUtility; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SessionDataBean; import edu.wustl.common.exception.ApplicationException; import edu.wustl.common.util.logger.Logger; import edu.wustl.dao.DAO; import edu.wustl.dao.exception.DAOException; import edu.wustl.security.exception.SMException; /** * This class exposes the functionality to set storage containers * automatically for multiple specimens. * @author abhijit_naik * */ public class SpecimenAutoStorageContainer { private transient Logger logger = Logger.getCommonLogger(SpecimenAutoStorageContainer.class); private LinkedHashMap<String, LinkedList<GenericSpecimen>> specimenMap = new LinkedHashMap<String, LinkedList<GenericSpecimen>> (); private Long cpId = null; private LinkedHashMap<Long, LinkedHashMap<String, LinkedList<GenericSpecimen>>> collectionProtocolSpecimenMap = new LinkedHashMap<Long, LinkedHashMap<String,LinkedList<GenericSpecimen>>> (); private ArrayList<String> storageContainerIds = new ArrayList<String>(); public void setCollectionProtocol(Long cpId) { this.cpId = cpId; } public void addSpecimen(GenericSpecimen specimen, String className) { addToMap(specimen, className, specimenMap); } public void addSpecimen(GenericSpecimen specimen, String className, Long collectionProtocolId) { if (collectionProtocolSpecimenMap.get(collectionProtocolId) == null) { collectionProtocolSpecimenMap.put(collectionProtocolId, new LinkedHashMap<String, LinkedList<GenericSpecimen>> ()); } LinkedHashMap<String, LinkedList<GenericSpecimen>> targetMap = collectionProtocolSpecimenMap.get(collectionProtocolId); addToMap(specimen, className, targetMap); } private void addToMap (GenericSpecimen specimen, String className, LinkedHashMap<String, LinkedList<GenericSpecimen>> targetMap) { if( targetMap.get(className) == null) { targetMap.put(className, new LinkedList<GenericSpecimen>()); } LinkedList<GenericSpecimen> specimenList = targetMap.get(className); specimenList.add(specimen); } public void setSpecimenStoragePositions(SessionDataBean sessionDataBean) throws ApplicationException { storageContainerIds.clear(); setAutoStoragePositions(specimenMap, sessionDataBean, cpId); } public void setCollectionProtocolSpecimenStoragePositions( SessionDataBean sessionDataBean) throws ApplicationException { storageContainerIds.clear(); Set<Long> keySet = collectionProtocolSpecimenMap.keySet(); Iterator<Long> keySetIterator = keySet.iterator(); while(keySetIterator.hasNext()) { Long collectionProtocolId = keySetIterator.next(); LinkedHashMap<String, LinkedList<GenericSpecimen>> autoSpecimenMap = collectionProtocolSpecimenMap.get(collectionProtocolId); setAutoStoragePositions(autoSpecimenMap, sessionDataBean, collectionProtocolId ); } } /** * @param sessionDataBean * @throws DAOException */ private void setAutoStoragePositions( LinkedHashMap<String, LinkedList<GenericSpecimen>> autoSpecimenMap, SessionDataBean sessionDataBean, Long cpId) throws ApplicationException { DAO dao = null; try { dao = AppUtility.openDAOSession(sessionDataBean); Set<String> keySet = autoSpecimenMap.keySet(); if 
(!keySet.isEmpty()) { Iterator<String> keySetIterator = keySet.iterator(); while(keySetIterator.hasNext()) { String key = keySetIterator.next(); LinkedList<GenericSpecimen> specimenList = autoSpecimenMap.get(key); setSpecimenStorageDetails(specimenList,key,sessionDataBean, cpId,dao); } } } catch(DAOException daoException) { this.logger.error(daoException.getMessage(),daoException); daoException.printStackTrace(); throw AppUtility.getApplicationException(daoException, daoException.getErrorKeyName(), daoException.getMsgValues()); } finally { AppUtility.closeDAOSession(dao); } } protected void setSpecimenStorageDetails(LinkedList<GenericSpecimen> specimenDataBeanList, String className, SessionDataBean bean, Long cpId ,DAO dao) throws ApplicationException { try { StorageContainerForSpecimenBizLogic bizLogic = new StorageContainerForSpecimenBizLogic(); Iterator<GenericSpecimen> itr = specimenDataBeanList.iterator(); Map<String, LinkedList<GenericSpecimen>> spTypeMap = new LinkedHashMap<String, LinkedList<GenericSpecimen>>(); String specimenType = null; while(itr.hasNext()) { GenericSpecimen specimenDataBean = (GenericSpecimen)itr.next(); specimenType = specimenDataBean.getType(); if(!spTypeMap.keySet().contains(specimenType)) { spTypeMap.put(specimenType,new LinkedList<GenericSpecimen>()); } spTypeMap.get(specimenType).add(specimenDataBean); } Iterator<String> spTIterator = spTypeMap.keySet().iterator(); Map containerMap = null; while(spTIterator.hasNext()) { specimenType = spTIterator.next(); containerMap = bizLogic.getAllocatedContainerMapForSpecimen( AppUtility.setparameterList(cpId.longValue(),className,0, specimenType), bean, dao); populateStorageLocations(spTypeMap.get(specimenType), cpId.longValue(), containerMap, bean, className); } spTypeMap.clear(); } catch (ApplicationException exception) { this.logger.error(exception.getMessage(), exception); exception.printStackTrace(); throw AppUtility.getApplicationException( exception,exception.getErrorKeyName(), exception.getMsgValues()); } } protected void populateStorageLocations(LinkedList specimenDataBeanList, Long collectionProtocolId, Map containerMap, SessionDataBean bean, String classType) throws SMException,DAOException { int counter = 0; if (containerMap.isEmpty()) { return; } Object[] containerId = containerMap.keySet().toArray(); for (int i = 0; i < containerId.length; i++) { if(counter >= specimenDataBeanList.size()) { break; } String storageId = ((NameValueBean) containerId[i]).getValue(); StorageContainer sc = new StorageContainer(); sc.setId(Long.valueOf(storageId)); sc.setName(((NameValueBean) containerId[i]).getName()); Map xDimMap = (Map) containerMap.get(containerId[i]); if (!xDimMap.isEmpty()) { counter = populateStoragePositions(specimenDataBeanList, counter, sc, xDimMap); } } } /** * @param specimenDataBeanList * @param counter * @param sc * @param xDimMap * @return */ private int populateStoragePositions(LinkedList specimenDataBeanList, int counter, StorageContainer sc, Map xDimMap) { Object[] xDim = xDimMap.keySet().toArray(); for (int j = 0; j < xDim.length; j++) { List yDimList = (List) xDimMap.get(xDim[j]); if(counter >= specimenDataBeanList.size()) { break; } for (int k = 0; k < yDimList.size(); k++) { if(counter >= specimenDataBeanList.size()) { break; } GenericSpecimen specimenDataBean = (GenericSpecimen)specimenDataBeanList.get(counter); String stName = sc.getName(); String posOne = ((NameValueBean) xDim[j]).getValue(); String posTwo = ((NameValueBean) yDimList.get(k)).getValue(); String storageValue = 
stName+":"+posOne+" ,"+posTwo; if(specimenDataBean.getReadOnly()) { storageValue = specimenDataBean.getSelectedContainerName()+":"+specimenDataBean.getPositionDimensionOne()+" ,"+specimenDataBean.getPositionDimensionTwo(); k--; counter++; } else { if(!storageContainerIds.contains(storageValue)) { specimenDataBean.setContainerId(String.valueOf(sc.getId())); specimenDataBean.setSelectedContainerName(stName); specimenDataBean.setPositionDimensionOne(posOne); specimenDataBean.setPositionDimensionTwo(posTwo); storageContainerIds.add(storageValue); counter++; } else { continue; } } } } return counter; } public void fillAllocatedPositionSet(Set asignedPositonSet) { Iterator keyItr = specimenMap.keySet().iterator(); while(keyItr.hasNext()) { String key = (String)keyItr.next(); LinkedList speciList = (LinkedList)specimenMap.get(key); Iterator speciListItr = speciList.iterator(); while(speciListItr.hasNext()) { GenericSpecimen specimen = (GenericSpecimen)speciListItr.next(); //Mandar : 19Aug08 ---start if(specimen.getSelectedContainerName() != null) { String allocatedPos = specimen.getSelectedContainerName() + "#"+ specimen.getContainerId()+"#"+specimen.getPositionDimensionOne()+"#"+specimen.getPositionDimensionTwo(); asignedPositonSet.add(allocatedPos); } //Mandar : 19Aug08 ---end } } } }
WEB-INF/src/edu/wustl/catissuecore/util/SpecimenAutoStorageContainer.java
package edu.wustl.catissuecore.util; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import edu.wustl.catissuecore.bean.GenericSpecimen; import edu.wustl.catissuecore.bizlogic.StorageContainerForSpecimenBizLogic; import edu.wustl.catissuecore.domain.StorageContainer; import edu.wustl.catissuecore.util.global.AppUtility; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SessionDataBean; import edu.wustl.common.exception.ApplicationException; import edu.wustl.common.util.logger.Logger; import edu.wustl.dao.DAO; import edu.wustl.dao.exception.DAOException; import edu.wustl.security.exception.SMException; /** * This class exposes the functionality to set storage containers * automatically for multiple specimens. * @author abhijit_naik * */ public class SpecimenAutoStorageContainer { private transient Logger logger = Logger.getCommonLogger(SpecimenAutoStorageContainer.class); private LinkedHashMap<String, LinkedList<GenericSpecimen>> specimenMap = new LinkedHashMap<String, LinkedList<GenericSpecimen>> (); private Long cpId = null; private LinkedHashMap<Long, LinkedHashMap<String, LinkedList<GenericSpecimen>>> collectionProtocolSpecimenMap = new LinkedHashMap<Long, LinkedHashMap<String,LinkedList<GenericSpecimen>>> (); private ArrayList<String> storageContainerIds = new ArrayList<String>(); public void setCollectionProtocol(Long cpId) { this.cpId = cpId; } public void addSpecimen(GenericSpecimen specimen, String className) { addToMap(specimen, className, specimenMap); } public void addSpecimen(GenericSpecimen specimen, String className, Long collectionProtocolId) { if (collectionProtocolSpecimenMap.get(collectionProtocolId) == null) { collectionProtocolSpecimenMap.put(collectionProtocolId, new LinkedHashMap<String, LinkedList<GenericSpecimen>> ()); } LinkedHashMap<String, LinkedList<GenericSpecimen>> targetMap = collectionProtocolSpecimenMap.get(collectionProtocolId); addToMap(specimen, className, targetMap); } private void addToMap (GenericSpecimen specimen, String className, LinkedHashMap<String, LinkedList<GenericSpecimen>> targetMap) { if( targetMap.get(className) == null) { targetMap.put(className, new LinkedList<GenericSpecimen>()); } LinkedList<GenericSpecimen> specimenList = targetMap.get(className); specimenList.add(specimen); } public void setSpecimenStoragePositions(SessionDataBean sessionDataBean) throws ApplicationException { storageContainerIds.clear(); setAutoStoragePositions(specimenMap, sessionDataBean, cpId); } public void setCollectionProtocolSpecimenStoragePositions( SessionDataBean sessionDataBean) throws ApplicationException { storageContainerIds.clear(); Set<Long> keySet = collectionProtocolSpecimenMap.keySet(); Iterator<Long> keySetIterator = keySet.iterator(); while(keySetIterator.hasNext()) { Long collectionProtocolId = keySetIterator.next(); LinkedHashMap<String, LinkedList<GenericSpecimen>> autoSpecimenMap = collectionProtocolSpecimenMap.get(collectionProtocolId); setAutoStoragePositions(autoSpecimenMap, sessionDataBean, collectionProtocolId ); } } /** * @param sessionDataBean * @throws DAOException */ private void setAutoStoragePositions( LinkedHashMap<String, LinkedList<GenericSpecimen>> autoSpecimenMap, SessionDataBean sessionDataBean, Long cpId) throws ApplicationException { DAO dao = null; try { dao = AppUtility.openDAOSession(sessionDataBean); Set<String> keySet = 
autoSpecimenMap.keySet(); if (!keySet.isEmpty()) { Iterator<String> keySetIterator = keySet.iterator(); while(keySetIterator.hasNext()) { String key = keySetIterator.next(); LinkedList<GenericSpecimen> specimenList = autoSpecimenMap.get(key); setSpecimenStorageDetails(specimenList,key,sessionDataBean, cpId,dao); } } } catch(DAOException daoException) { this.logger.error(daoException.getMessage(),daoException); daoException.printStackTrace(); throw AppUtility.getApplicationException(daoException, daoException.getErrorKeyName(), daoException.getMsgValues()); } finally { AppUtility.closeDAOSession(dao); } } protected void setSpecimenStorageDetails(LinkedList<GenericSpecimen> specimenDataBeanList, String className, SessionDataBean bean, Long cpId ,DAO dao) throws ApplicationException { try { StorageContainerForSpecimenBizLogic bizLogic = new StorageContainerForSpecimenBizLogic(); Iterator<GenericSpecimen> itr = specimenDataBeanList.iterator(); Map<String, LinkedList<GenericSpecimen>> spTypeMap = new HashMap<String, LinkedList<GenericSpecimen>>(); String specimenType = null; while(itr.hasNext()) { GenericSpecimen specimenDataBean = (GenericSpecimen)itr.next(); specimenType = specimenDataBean.getType(); if(!spTypeMap.keySet().contains(specimenType)) { spTypeMap.put(specimenType,new LinkedList<GenericSpecimen>()); } spTypeMap.get(specimenType).add(specimenDataBean); } Iterator<String> spTIterator = spTypeMap.keySet().iterator(); Map containerMap = null; while(spTIterator.hasNext()) { specimenType = spTIterator.next(); containerMap = bizLogic.getAllocatedContainerMapForSpecimen( AppUtility.setparameterList(cpId.longValue(),className,0, specimenType), bean, dao); populateStorageLocations(spTypeMap.get(specimenType), cpId.longValue(), containerMap, bean, className); } spTypeMap.clear(); } catch (ApplicationException exception) { this.logger.error(exception.getMessage(), exception); exception.printStackTrace(); throw AppUtility.getApplicationException( exception,exception.getErrorKeyName(), exception.getMsgValues()); } } protected void populateStorageLocations(LinkedList specimenDataBeanList, Long collectionProtocolId, Map containerMap, SessionDataBean bean, String classType) throws SMException,DAOException { int counter = 0; if (containerMap.isEmpty()) { return; } Object[] containerId = containerMap.keySet().toArray(); for (int i = 0; i < containerId.length; i++) { if(counter >= specimenDataBeanList.size()) { break; } String storageId = ((NameValueBean) containerId[i]).getValue(); StorageContainer sc = new StorageContainer(); sc.setId(Long.valueOf(storageId)); sc.setName(((NameValueBean) containerId[i]).getName()); Map xDimMap = (Map) containerMap.get(containerId[i]); if (!xDimMap.isEmpty()) { counter = populateStoragePositions(specimenDataBeanList, counter, sc, xDimMap); } } } /** * @param specimenDataBeanList * @param counter * @param sc * @param xDimMap * @return */ private int populateStoragePositions(LinkedList specimenDataBeanList, int counter, StorageContainer sc, Map xDimMap) { Object[] xDim = xDimMap.keySet().toArray(); for (int j = 0; j < xDim.length; j++) { List yDimList = (List) xDimMap.get(xDim[j]); if(counter >= specimenDataBeanList.size()) { break; } for (int k = 0; k < yDimList.size(); k++) { if(counter >= specimenDataBeanList.size()) { break; } GenericSpecimen specimenDataBean = (GenericSpecimen)specimenDataBeanList.get(counter); String stName = sc.getName(); String posOne = ((NameValueBean) xDim[j]).getValue(); String posTwo = ((NameValueBean) yDimList.get(k)).getValue(); String 
storageValue = stName+":"+posOne+" ,"+posTwo; if(specimenDataBean.getReadOnly()) { storageValue = specimenDataBean.getSelectedContainerName()+":"+specimenDataBean.getPositionDimensionOne()+" ,"+specimenDataBean.getPositionDimensionTwo(); k--; counter++; } else { if(!storageContainerIds.contains(storageValue)) { specimenDataBean.setContainerId(String.valueOf(sc.getId())); specimenDataBean.setSelectedContainerName(stName); specimenDataBean.setPositionDimensionOne(posOne); specimenDataBean.setPositionDimensionTwo(posTwo); storageContainerIds.add(storageValue); counter++; } else { continue; } } } } return counter; } public void fillAllocatedPositionSet(Set asignedPositonSet) { Iterator keyItr = specimenMap.keySet().iterator(); while(keyItr.hasNext()) { String key = (String)keyItr.next(); LinkedList speciList = (LinkedList)specimenMap.get(key); Iterator speciListItr = speciList.iterator(); while(speciListItr.hasNext()) { GenericSpecimen specimen = (GenericSpecimen)speciListItr.next(); //Mandar : 19Aug08 ---start if(specimen.getSelectedContainerName() != null) { String allocatedPos = specimen.getSelectedContainerName() + "#"+ specimen.getContainerId()+"#"+specimen.getPositionDimensionOne()+"#"+specimen.getPositionDimensionTwo(); asignedPositonSet.add(allocatedPos); } //Mandar : 19Aug08 ---end } } } }
Bug 17108 - Parent 2 was allocated the first free positions, and Parent 1 was auto-allocated only later. SVN-Revision: 24522
WEB-INF/src/edu/wustl/catissuecore/util/SpecimenAutoStorageContainer.java
Bug 17108 - Parent 2 was allocated the first free positions, and Parent 1 was auto-allocated only later.
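The substantive change in this commit is in setSpecimenStorageDetails: spTypeMap moves from HashMap to LinkedHashMap, so specimen types are processed in the order the specimens were added rather than in hash order, which is what allowed Parent 2 to grab the first free positions ahead of Parent 1. A minimal sketch of the ordering difference (the key names are illustrative, not from caTissue):

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class IterationOrderDemo {
    public static void main(String[] args) {
        // HashMap iterates in hash order, so entries may come back in any order.
        Map<String, String> hashed = new HashMap<String, String>();
        // LinkedHashMap iterates in insertion order, matching the order specimens were added.
        Map<String, String> ordered = new LinkedHashMap<String, String>();
        for (String key : new String[] {"Parent 1", "Parent 2"}) {
            hashed.put(key, "first free positions");
            ordered.put(key, "first free positions");
        }
        System.out.println(hashed.keySet());  // no ordering guarantee
        System.out.println(ordered.keySet()); // always [Parent 1, Parent 2]
    }
}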
Java
mit
b815775e85d314b621288d373a400ffebab02e59
0
godotgildor/igv,igvteam/igv,igvteam/igv,godotgildor/igv,itenente/igv,igvteam/igv,godotgildor/igv,itenente/igv,amwenger/igv,amwenger/igv,igvteam/igv,amwenger/igv,godotgildor/igv,itenente/igv,itenente/igv,itenente/igv,amwenger/igv,amwenger/igv,igvteam/igv,godotgildor/igv
/* * Copyright (c) 2007-2011 by Institute for Computational Biomedicine, * Weill Medical College of Cornell University. * * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), * Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php. * * THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR * WARRANTES OF ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR * PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER * OR NOT DISCOVERABLE. IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR RESPECTIVE * TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES * OF ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES, * ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER * THE BROAD OR MIT SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT * SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. */ package org.broad.igv.goby; import edu.cornell.med.icb.goby.alignments.AlignmentReaderImpl; import edu.cornell.med.icb.identifier.DoubleIndexedIdentifier; import edu.cornell.med.icb.identifier.IndexedIdentifier; import it.unimi.dsi.lang.MutableString; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.util.CloseableIterator; import org.apache.log4j.Logger; import org.broad.igv.sam.Alignment; import org.broad.igv.sam.reader.AlignmentReader; import java.io.IOException; import java.util.HashSet; import java.util.Set; /** * Query reader to parse <a href="http://goby.campagnelab.org">Goby</a> alignment files. * The compact alignment files must be sorted and indexed. * <p/> * For further information about Goby, or to obtain sample alignment files, see http://goby.campagnelab.org * * @author Fabien Campagne * Date: Jun 29, 2010 * Time: 11:43:18 AM */ public class GobyAlignmentQueryReader implements AlignmentReader { private static final Logger LOG = Logger.getLogger(GobyAlignmentQueryReader.class); private AlignmentReaderImpl reader = null; private final String basename; private DoubleIndexedIdentifier targetIdentifiers; private boolean isIndexed; private Set<String> targetSequenceNames; /** * Construct a query reader for this filename/basename. * * @param filename The filename of any file component for a Goby compact alignment, or the alignment basename. * @throws IOException If an error occurs reading the alignment. */ public GobyAlignmentQueryReader(String filename) throws IOException { basename = filename; reader = new AlignmentReaderImpl(filename); reader.readHeader(); if (!reader.isIndexed()) { final String errorMessage = "Goby alignment files must be sorted in order to be loaded in IGV. See the IGV tutorial at http://goby.campagnelab.org/ for details."; System.err.println(errorMessage); throw new UnsupportedOperationException(errorMessage); } final IndexedIdentifier identifiers = reader.getTargetIdentifiers(); // add MT as a synonym for M: identifiers.put(new MutableString("M"), identifiers.getInt(new MutableString("MT"))); targetIdentifiers = new DoubleIndexedIdentifier(identifiers); isIndexed = reader.isIndexed(); // reader.close(); // reader = null; targetSequenceNames = new HashSet(); for (MutableString ms : identifiers.keySet()) { targetSequenceNames.add(ms.toString()); } } /** * Release resources associated with this query reader. Closes the underlying Goby AlignmentReader. 
* * @throws IOException */ public void close() throws IOException { if (reader != null) { reader.close(); reader = null; } } public Set<String> getSequenceNames() { return targetSequenceNames; } /** * This method returns null. We are not reading a SAM file. IGV does not seem to mind receiving null. * * @return null */ public SAMFileHeader getHeader() { return null; } /** * Obtain an iterator over the entire file. * * @return An alignment iterator. */ public CloseableIterator<Alignment> iterator() { return new GobyAlignmentIterator(getNewLocalReader(), targetIdentifiers); } /** * Obtain an iterator over a genomic window. The window on reference 'sequence' extends from * start to end. The attribute 'contained' is ignored (documentation required). * * @return An alignment iterator restricted to the sequence [start end] interval. */ public final CloseableIterator<Alignment> query(String sequence, int start, int end, boolean contained) { LOG.debug(String.format("query %s %d %d %b%n", sequence, start, end, contained)); final MutableString id = new MutableString(sequence); int referenceIndex = targetIdentifiers.getIndex(id); if (referenceIndex == -1) { // try again removing a chr prefix: referenceIndex = targetIdentifiers.getIndex(id.replace("chr", "")); } try { return new GobyAlignmentIterator(getNewLocalReader(), targetIdentifiers, referenceIndex, sequence, start, end); } catch (IOException e) { LOG.error(e); return null; } } /** * Determines whether the file is indexed. * * @return True. Goby files must be indexed. */ public final boolean hasIndex() { return isIndexed; } /** * Determines whether filename can be loaded by this QueryReader. * <p/> * * @param filename Name of a file component or alignment basename. * @return True if this implementation can load the alignment corresponding to this filename. */ public static boolean supportsFileType(String filename) { final boolean result = AlignmentReaderImpl.canRead(filename); LOG.debug(String.format("supportsFileType %s result=%b", filename, result)); return result; } private AlignmentReaderImpl getNewLocalReader() { if (reader != null) return reader; try { return new AlignmentReaderImpl(basename); } catch (IOException e) { throw new RuntimeException(e); } } }
src/org/broad/igv/goby/GobyAlignmentQueryReader.java
/* * Copyright (c) 2007-2011 by Institute for Computational Biomedicine, * Weill Medical College of Cornell University. * * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), * Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php. * * THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR * WARRANTES OF ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR * PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER * OR NOT DISCOVERABLE. IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR RESPECTIVE * TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES * OF ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES, * ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER * THE BROAD OR MIT SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT * SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. */ package org.broad.igv.goby; import org.broad.igv.sam.Alignment; import org.broad.igv.sam.reader.AlignmentReader; import org.apache.log4j.Logger; import java.io.IOException; import java.util.HashSet; import java.util.Set; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.util.CloseableIterator; import edu.cornell.med.icb.goby.alignments.AlignmentReaderImpl; import edu.cornell.med.icb.identifier.DoubleIndexedIdentifier; import edu.cornell.med.icb.identifier.IndexedIdentifier; import it.unimi.dsi.lang.MutableString; /** * Query reader to parse <a href="http://goby.campagnelab.org">Goby</a> alignment files. * The compact alignment files must be sorted and indexed. * <p/> * For further information about Goby, or to obtain sample alignment files, see http://goby.campagnelab.org * * @author Fabien Campagne * Date: Jun 29, 2010 * Time: 11:43:18 AM */ public class GobyAlignmentQueryReader implements AlignmentReader { private static final Logger LOG = Logger.getLogger(GobyAlignmentQueryReader.class); private AlignmentReaderImpl reader = null; private final String basename; private DoubleIndexedIdentifier targetIdentifiers; private boolean isIndexed; private Set<String> targetSequenceNames; /** * Construct a query reader for this filename/basename. * * @param filename The filename of any file component for a Goby compact alignment, or the alignment basename. * @throws IOException If an error occurs reading the alignment. */ public GobyAlignmentQueryReader(String filename) throws IOException { basename = filename; reader = new AlignmentReaderImpl(filename); reader.readHeader(); if (!reader.isIndexed()) { final String errorMessage = "Goby alignment files must be sorted in order to be loaded in IGV. See the IGV tutorial at http://goby.campagnelab.org/ for details."; System.err.println(errorMessage); throw new UnsupportedOperationException(errorMessage); } final IndexedIdentifier identifiers = reader.getTargetIdentifiers(); // add MT as a synonym for M: identifiers.put(new MutableString("M"), identifiers.getInt(new MutableString("MT"))); targetIdentifiers = new DoubleIndexedIdentifier(identifiers); isIndexed = reader.isIndexed(); // reader.close(); // reader = null; targetSequenceNames = new HashSet(); for(MutableString ms : identifiers.keySet()) { targetSequenceNames.add(ms.toString()); } } /** * Release resources associated with this query reader. Closes the underlying Goby AlignmentReader. 
* * @throws IOException */ public void close() throws IOException { if (reader!=null) { reader.close(); reader=null; } } public Set<String> getSequenceNames() { return targetSequenceNames; } /** * This method returns null. We are not reading a SAM file. IGV does not seem to mind receiving null. * * @return null */ public SAMFileHeader getHeader() { return null; } /** * Obtain an iterator over the entire file. * * @return An alignment iterator. */ public CloseableIterator<Alignment> iterator() { return new GobyAlignmentIterator(getNewLocalReader(), targetIdentifiers); } /** * Obtain an iterator over a genomic window. The window on reference 'sequence' extends from * start to end. The attribute 'contained' is ignored (documentation required). * * @return An alignment iterator restricted to the sequence [start end] interval. */ public final CloseableIterator<Alignment> query(String sequence, int start, int end, boolean contained) { LOG.debug(String.format("query %s %d %d %b%n", sequence, start, end, contained)); int referenceIndex = targetIdentifiers.getIndex(new MutableString(sequence).replace("chr", "")); try { return new GobyAlignmentIterator(getNewLocalReader(), targetIdentifiers, referenceIndex, sequence, start, end); } catch (IOException e) { LOG.error(e); return null; } } /** * Determines whether the file is indexed. * * @return True. Goby files must be indexed. */ public final boolean hasIndex() { return isIndexed; } /** * Determines whether filename can be loaded by this QueryReader. * <p/> * @param filename Name of a file component or alignment basename. * @return True if this implementation can load the alignment corresponding to this filename. */ public static boolean supportsFileType(String filename) { final boolean result = AlignmentReaderImpl.canRead(filename); LOG.debug(String.format("supportsFileType %s result=%b", filename, result)); return result; } private AlignmentReaderImpl getNewLocalReader() { if (reader != null) return reader; try { return new AlignmentReaderImpl(basename); } catch (IOException e) { throw new RuntimeException(e); } } }
Some goby alignments may already have the target sequence names stripped of the "chr" prefix. Make it possible to load these alignments by first trying to locate the sequence name without removing chr. git-svn-id: b5cf87c434d9ee7c8f18865e4378c9faabe04646@2038 17392f64-ead8-4cea-ae29-09b3ab513800
src/org/broad/igv/goby/GobyAlignmentQueryReader.java
Some goby alignments may already have the target sequence names stripped of the "chr" prefix. Make it possible to load these alignments by first trying to locate the sequence name without removing chr.
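The fix replaces the unconditional prefix strip with a two-step lookup: try the sequence name exactly as given, and only fall back to removing "chr" when that lookup fails. A minimal sketch of the same fallback pattern, with a plain Map standing in for Goby's DoubleIndexedIdentifier:

import java.util.HashMap;
import java.util.Map;

public class SequenceLookupDemo {
    // Resolve a sequence name to its target index, falling back to the
    // name without the "chr" prefix when the exact name is not found.
    static int lookupIndex(Map<String, Integer> targets, String sequence) {
        Integer index = targets.get(sequence);                // try the name as-is first
        if (index == null) {
            index = targets.get(sequence.replace("chr", "")); // then retry without the prefix
        }
        return index == null ? -1 : index;
    }

    public static void main(String[] args) {
        Map<String, Integer> targets = new HashMap<String, Integer>();
        targets.put("1", 0); // this alignment stores names without the "chr" prefix
        System.out.println(lookupIndex(targets, "chr1")); // 0, via the fallback
        System.out.println(lookupIndex(targets, "1"));    // 0, direct hit
    }
}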
Java
mit
8c414824e1cba1d91595067e58e7696ad9e7d6de
0
erwinvaneyk/distributedrmi
package nl.erwinvaneyk.core; import static org.junit.Assert.*; import java.rmi.RemoteException; import nl.erwinvaneyk.communication.exceptions.CommunicationException; import nl.erwinvaneyk.communication.exceptions.PortAlreadyInUseException; import org.junit.Test; public class NodeImplTest { @Test public void startCluster() throws CommunicationException { Node headnode = NodeImpl.startCluster(1817, "test-cluster"); assertEquals(headnode.getState().getAddress().getLocation().getPort(), 1817); assertEquals(headnode.getState().getAddress().getIdentifier(), "NodeImpl-0"); assertEquals(headnode.getState().getClusterId(), "test-cluster"); headnode.disconnect(); } @Test public void shutdownNode() throws CommunicationException { Node node = NodeImpl.startCluster(1818, "test-cluster"); node.disconnect(); // Check if port has been released NodeImpl.startCluster(1818, "test-cluster").disconnect(); } @Test public void joinCluster() throws CommunicationException { Node node1 = NodeImpl.startCluster(1819, "test-cluster"); Node node2 = NodeImpl.connectToCluster(1820, node1.getState().getAddress()); assertEquals(node2.getState().getClusterId(), "test-cluster"); node1.disconnect(); node2.disconnect(); } @Test public void joinClusterOnNonStartingNode() throws CommunicationException { Node node1 = NodeImpl.startCluster(1819, "test-cluster"); Node node2 = NodeImpl.connectToCluster(1820, node1.getState().getAddress()); Node node3 = NodeImpl.connectToCluster(1821, node2.getState().getAddress()); Node node4 = NodeImpl.connectToCluster(1822, node3.getState().getAddress()); assertEquals(node4.getState().getClusterId(), "test-cluster"); System.out.println(node4.getState()); assertEquals(3, node4.getState().getConnectedNodes().size()); node1.disconnect(); node2.disconnect(); node3.disconnect(); node4.disconnect(); } }
src/test/java/nl/erwinvaneyk/core/NodeImplTest.java
package nl.erwinvaneyk.core; import static org.junit.Assert.*; import java.rmi.RemoteException; import nl.erwinvaneyk.communication.exceptions.CommunicationException; import nl.erwinvaneyk.communication.exceptions.PortAlreadyInUseException; import org.junit.Test; public class NodeImplTest { @Test public void startCluster() throws CommunicationException { Node headnode = NodeImpl.startCluster(1817, "test-cluster"); assertEquals(headnode.getState().getAddress().getLocation().getPort(), 1817); assertEquals(headnode.getState().getAddress().getIdentifier(), "NodeImpl-0"); assertEquals(headnode.getState().getClusterId(), "test-cluster"); headnode.disconnect(); } @Test public void shutdownNode() throws CommunicationException { Node node = NodeImpl.startCluster(1818, "test-cluster"); node.disconnect(); // Check if port has been released NodeImpl.startCluster(1818, "test-cluster").disconnect(); } @Test public void joinCluster() throws CommunicationException { Node node1 = NodeImpl.startCluster(1819, "test-cluster"); Node node2 = NodeImpl.connectToCluster(1820, node1.getState().getAddress()); assertEquals(node2.getState().getClusterId(), "test-cluster"); node1.disconnect(); node2.disconnect(); } }
Add test for large clusters
src/test/java/nl/erwinvaneyk/core/NodeImplTest.java
Add test for large clusters
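One fragility worth flagging in these tests: each one releases its ports only on the success path, so a failed assertion leaves the nodes connected and their ports bound, which can cascade into failures in later tests that reuse them. A sketch of a try/finally guard for the two-node case, using only the API already present in the test class above:

@Test
public void joinCluster() throws CommunicationException {
    Node node1 = NodeImpl.startCluster(1819, "test-cluster");
    Node node2 = NodeImpl.connectToCluster(1820, node1.getState().getAddress());
    try {
        assertEquals("test-cluster", node2.getState().getClusterId());
    } finally {
        // release the ports even when the assertion fails
        node1.disconnect();
        node2.disconnect();
    }
}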
Java
mit
79089f0863da0936d207a045fd4cf5780ed37c91
0
RUCD/apt-graph,RUCD/apt-graph,RUCD/apt-graph,RUCD/apt-graph
/* * The MIT License * * Copyright 2016 Thibault Debatty. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package aptgraph.server; import aptgraph.core.Request; import info.debatty.java.graphs.Graph; import info.debatty.java.graphs.Neighbor; import info.debatty.java.graphs.NeighborList; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * * @author Thibault Debatty */ public class RequestHandler { private final HashMap<String, LinkedList<Graph<Request>>> user_graphs; RequestHandler(final HashMap<String, LinkedList<Graph<Request>>> user_graphs) { this.user_graphs = user_graphs; } /** * A test json-rpc call, with no argument, that should return "hello". * @return */ public final String test() { return "hello"; } /** * A dummy method that returns some clusters of nodes and edges. 
* @return */ public final List<Graph<Request>> dummy() { Graph<Request> graph = user_graphs.get("219.253.194.242").getFirst(); // Feature Fusion // URL/Domain clustering // Prune & clustering graph.prune(0.9); ArrayList<Graph<Request>> clusters = graph.connectedComponents(); // Filtering LinkedList<Graph<Request>> filtered = new LinkedList<Graph<Request>>(); for (Graph<Request> subgraph : clusters) { if (subgraph.size() < 10) { filtered.add(subgraph); } } System.out.println("Found " + filtered.size() + " clusters"); return filtered; } /** * * @param user_temp * @param feature_ordered_weights * @param prune_threshold * @param feature_weights * @param max_cluster_size * @return */ public final List<Graph<Domain>> analyze( final String user_temp, final double[] feature_weights, final double[] feature_ordered_weights, final double prune_threshold, final int max_cluster_size) { // START user selection // List of the user LinkedList<String> users = new LinkedList<String>(); for (Map.Entry<String, LinkedList<Graph<Request>>> entry_set : user_graphs.entrySet()) { String key = entry_set.getKey(); users.add(key); } System.out.println("List of user : " + users); // Choice of the graphs of the user(need to be choosed on the web page) String user = users.getFirst(); // a remplacer par user_temp // END user selection LinkedList<Graph<Request>> graphs = user_graphs.get(user); // Verify the sum of the weights double sum_feature_weights = 0; for (double d : feature_weights) { sum_feature_weights += d; } double sum_ordered_weights = 0; for (double d : feature_ordered_weights) { sum_ordered_weights += d; } if (sum_feature_weights != 1 || sum_ordered_weights != 1) { System.out.println("Error with weights"); return null; } // Fusion of the features (Graph of Requests) Graph<Request> merged_graph = computeFusionFeatures(graphs, feature_ordered_weights, feature_weights); // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } // From Graph of Requests to HashMap of Domains // (it contains every requests of a specific domain, for each domain) HashMap<String, Domain> domains = computeDomainGraph(merged_graph); // Compute similarity between domains and build domain graph // A domain is (for now) a list of Request. 
Graph<Domain> domain_graph = new Graph<Domain>(Integer.MAX_VALUE); // For each domain for (Entry<String, Domain> domain_entry : domains.entrySet()) { // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } String domain_name = domain_entry.getKey(); Domain domain_node = domain_entry.getValue(); HashMap<Domain, Double> other_domains_sim = new HashMap<Domain, Double>(); // For each request in this domain for (Request request_node : domain_node) { // Check each neighbor NeighborList neighbors = merged_graph.getNeighbors(request_node); for (Neighbor<Request> neighbor : neighbors) { Request target_request = neighbor.node; // Find the corresponding domain name String other_domain_name = target_request.getDomain(); if (other_domain_name.equals(domain_name)) { continue; } Domain other_domain = domains.get(other_domain_name); double new_similarity = neighbor.similarity; if (other_domains_sim.containsKey(other_domain)) { new_similarity += other_domains_sim.get(other_domain); } other_domains_sim.put(other_domain, new_similarity); } } NeighborList this_domain_neighbors = new NeighborList(1000); for (Entry<Domain, Double> other_domain_entry : other_domains_sim.entrySet()) { this_domain_neighbors.add(new Neighbor( other_domain_entry.getKey(), other_domain_entry.getValue())); } domain_graph.put(domain_node, this_domain_neighbors); } // Prune & clustering // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } domain_graph.prune(prune_threshold); // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } ArrayList<Graph<Domain>> clusters = domain_graph.connectedComponents(); // Filtering // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } LinkedList<Graph<Domain>> filtered = new LinkedList<Graph<Domain>>(); for (Graph<Domain> subgraph : clusters) { if (subgraph.size() < max_cluster_size) { filtered.add(subgraph); } } System.out.println("Found " + filtered.size() + " clusters"); return filtered; } private Graph<Request> computeFusionFeatures( final LinkedList<Graph<Request>> graphs, final double[] feature_ordered_weights, final double[] feature_weights) { int k = graphs.getFirst().getK(); // Feature Fusion // Weighted average using parameter feature_weights Graph<Request> merged_graph = new Graph<Request>(k); for (Request node : graphs.getFirst().getNodes()) { // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } HashMap<Request, Double> all_neighbors = new HashMap<Request, Double>(); for (int i = 0; i < graphs.size(); i++) { Graph<Request> feature_graph = graphs.get(i); NeighborList feature_neighbors = feature_graph.getNeighbors(node); for (Neighbor<Request> feature_neighbor : feature_neighbors) { double new_similarity = feature_weights[i] * feature_neighbor.similarity; if (all_neighbors.containsKey(feature_neighbor.node)) { new_similarity += all_neighbors.get(feature_neighbor.node); } all_neighbors.put(feature_neighbor.node, new_similarity); } } NeighborList nl = new NeighborList(k); for (Entry<Request, Double> entry : all_neighbors.entrySet()) { nl.add(new Neighbor(entry.getKey(), entry.getValue())); } merged_graph.put(node, nl); } return merged_graph; } private HashMap<String, Domain> computeDomainGraph( final Graph<Request> merged_graph) { // URL/Domain clustering // Associate each 
domain_name (String) to a Node<Domain> HashMap<String, Domain> domains = new HashMap<String, Domain>(); for (Request node : merged_graph.getNodes()) { String domain_name = node.getDomain(); Domain domain_node; if (domains.containsKey(domain_name)) { domain_node = domains.get(domain_name); } else { domain_node = new Domain(); domain_node.setName(domain_name); domains.put(domain_name, domain_node); } domain_node.add(node); } return domains; } }
server/src/main/java/aptgraph/server/RequestHandler.java
/* * The MIT License * * Copyright 2016 Thibault Debatty. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package aptgraph.server; import aptgraph.core.Request; import info.debatty.java.graphs.Graph; import info.debatty.java.graphs.Neighbor; import info.debatty.java.graphs.NeighborList; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * * @author Thibault Debatty */ public class RequestHandler { private final HashMap<String, LinkedList<Graph<Request>>> user_graphs; RequestHandler(final HashMap<String, LinkedList<Graph<Request>>> user_graphs) { this.user_graphs = user_graphs; } /** * A test json-rpc call, with no argument, that should return "hello". * @return */ public final String test() { return "hello"; } /** * A dummy method that returns some clusters of nodes and edges. 
* @return */ public final List<Graph<Request>> dummy() { Graph<Request> graph = user_graphs.get("219.253.194.242").getFirst(); // Feature Fusion // URL/Domain clustering // Prune & clustering graph.prune(0.9); ArrayList<Graph<Request>> clusters = graph.connectedComponents(); // Filtering LinkedList<Graph<Request>> filtered = new LinkedList<Graph<Request>>(); for (Graph<Request> subgraph : clusters) { if (subgraph.size() < 10) { filtered.add(subgraph); } } System.out.println("Found " + filtered.size() + " clusters"); return filtered; } /** * * @param user * @param feature_ordered_weights * @param prune_threshold * @param feature_weights * @param max_cluster_size * @return */ public final List<Graph<Domain>> analyze( final String user_temp, final double[] feature_weights, final double[] feature_ordered_weights, final double prune_threshold, final int max_cluster_size) { // START user selection // List of the user LinkedList<String> users = new LinkedList<String>(); for (Map.Entry<String, LinkedList<Graph<Request>>> entry_set : user_graphs.entrySet()) { String key = entry_set.getKey(); users.add(key); } System.out.println("List of user : " + users); // Choice of the graphs of the user(need to be choosed on the web page) String user = users.getFirst(); // a remplacer par user_temp // END user selection LinkedList<Graph<Request>> graphs = user_graphs.get(user); // Verify the sum of the weights double sum_feature_weights = 0; for (double d : feature_weights) { sum_feature_weights += d; } double sum_ordered_weights = 0; for (double d : feature_ordered_weights) { sum_ordered_weights += d; } if (sum_feature_weights != 1 || sum_ordered_weights != 1) { System.out.println("Error with weights"); return null; } // Fusion of the features (Graph of Requests) Graph<Request> merged_graph = computeFusionFeatures(graphs, feature_ordered_weights, feature_weights); // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } // From Graph of Requests to HashMap of Domains // (it contains every requests of a specific domain, for each domain) HashMap<String, Domain> domains = computeDomainGraph(merged_graph); // Compute similarity between domains and build domain graph // A domain is (for now) a list of Request. 
Graph<Domain> domain_graph = new Graph<Domain>(Integer.MAX_VALUE); // For each domain for (Entry<String, Domain> domain_entry : domains.entrySet()) { // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } String domain_name = domain_entry.getKey(); Domain domain_node = domain_entry.getValue(); HashMap<Domain, Double> other_domains_sim = new HashMap<Domain, Double>(); // For each request in this domain for (Request request_node : domain_node) { // Check each neighbor NeighborList neighbors = merged_graph.getNeighbors(request_node); for (Neighbor<Request> neighbor : neighbors) { Request target_request = neighbor.node; // Find the corresponding domain name String other_domain_name = target_request.getDomain(); if (other_domain_name.equals(domain_name)) { continue; } Domain other_domain = domains.get(other_domain_name); double new_similarity = neighbor.similarity; if (other_domains_sim.containsKey(other_domain)) { new_similarity += other_domains_sim.get(other_domain); } other_domains_sim.put(other_domain, new_similarity); } } NeighborList this_domain_neighbors = new NeighborList(1000); for (Entry<Domain, Double> other_domain_entry : other_domains_sim.entrySet()) { this_domain_neighbors.add(new Neighbor( other_domain_entry.getKey(), other_domain_entry.getValue())); } domain_graph.put(domain_node, this_domain_neighbors); } // Prune & clustering // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } domain_graph.prune(prune_threshold); // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } ArrayList<Graph<Domain>> clusters = domain_graph.connectedComponents(); // Filtering // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } LinkedList<Graph<Domain>> filtered = new LinkedList<Graph<Domain>>(); for (Graph<Domain> subgraph : clusters) { if (subgraph.size() < max_cluster_size) { filtered.add(subgraph); } } System.out.println("Found " + filtered.size() + " clusters"); return filtered; } private Graph<Request> computeFusionFeatures( final LinkedList<Graph<Request>> graphs, final double[] feature_ordered_weights, final double[] feature_weights) { int k = graphs.getFirst().getK(); // Feature Fusion // Weighted average using parameter feature_weights Graph<Request> merged_graph = new Graph<Request>(k); for (Request node : graphs.getFirst().getNodes()) { // The json-rpc request was probably canceled by the user if (Thread.currentThread().isInterrupted()) { return null; } HashMap<Request, Double> all_neighbors = new HashMap<Request, Double>(); for (int i = 0; i < graphs.size(); i++) { Graph<Request> feature_graph = graphs.get(i); NeighborList feature_neighbors = feature_graph.getNeighbors(node); for (Neighbor<Request> feature_neighbor : feature_neighbors) { double new_similarity = feature_weights[i] * feature_neighbor.similarity; if (all_neighbors.containsKey(feature_neighbor.node)) { new_similarity += all_neighbors.get(feature_neighbor.node); } all_neighbors.put(feature_neighbor.node, new_similarity); } } NeighborList nl = new NeighborList(k); for (Entry<Request, Double> entry : all_neighbors.entrySet()) { nl.add(new Neighbor(entry.getKey(), entry.getValue())); } merged_graph.put(node, nl); } return merged_graph; } private HashMap<String, Domain> computeDomainGraph( final Graph<Request> merged_graph) { // URL/Domain clustering // Associate each 
domain_name (String) to a Node<Domain> HashMap<String, Domain> domains = new HashMap<String, Domain>(); for (Request node : merged_graph.getNodes()) { String domain_name = node.getDomain(); Domain domain_node; if (domains.containsKey(domain_name)) { domain_node = domains.get(domain_name); } else { domain_node = new Domain(); domain_node.setName(domain_name); domains.put(domain_name, domain_node); } domain_node.add(node); } return domains; } }
Minor typo correction
server/src/main/java/aptgraph/server/RequestHandler.java
Minor typo correction
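A side note on the weight validation in analyze(): comparing a sum of doubles against 1 with != is brittle, because decimal weights need not sum to exactly 1.0 in binary floating point. A tolerance-based check is the usual remedy; a minimal sketch (the epsilon value is an assumption, not taken from apt-graph):

import java.util.Arrays;

public class WeightCheckDemo {
    private static final double EPSILON = 1e-9; // assumed tolerance, tune as needed

    static boolean sumsToOne(double[] weights) {
        double sum = 0.0;
        for (double w : weights) {
            sum += w;
        }
        // Tolerate floating-point rounding instead of requiring exact equality.
        return Math.abs(sum - 1.0) < EPSILON;
    }

    public static void main(String[] args) {
        double[] weights = new double[10];
        Arrays.fill(weights, 0.1); // ten features weighted 0.1 each
        double sum = 0.0;
        for (double w : weights) {
            sum += w;
        }
        System.out.println(sum == 1.0);         // false: the sum is 0.9999999999999999
        System.out.println(sumsToOne(weights)); // true
    }
}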
Java
mit
fe93fdebef1656a71a74492827f7325136d768d8
0
ReneMuetti/InGame-Info-XML-1,Lunatrius/InGame-Info-XML
package com.github.lunatrius.ingameinfo.reference; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; public class Reference { public static final String MODID = "InGameInfoXML"; public static final String NAME = "InGame Info XML"; public static final String VERSION = "${version}"; public static final String FORGE = "${forgeversion}"; public static final String MINECRAFT = "${mcversion}"; public static final String PROXY_COMMON = "com.github.lunatrius.ingameinfo.proxy.CommonProxy"; public static final String PROXY_CLIENT = "com.github.lunatrius.ingameinfo.proxy.ClientProxy"; public static final String GUI_FACTORY = "com.github.lunatrius.ingameinfo.client.gui.GuiFactory"; public static Logger logger = LogManager.getLogger(Reference.MODID); }
src/main/java/com/github/lunatrius/ingameinfo/reference/Reference.java
package com.github.lunatrius.ingameinfo.reference; import org.apache.logging.log4j.Logger; public class Reference { public static final String MODID = "InGameInfoXML"; public static final String NAME = "InGame Info XML"; public static final String VERSION = "${version}"; public static final String FORGE = "${forgeversion}"; public static final String MINECRAFT = "${mcversion}"; public static final String PROXY_COMMON = "com.github.lunatrius.ingameinfo.proxy.CommonProxy"; public static final String PROXY_CLIENT = "com.github.lunatrius.ingameinfo.proxy.ClientProxy"; public static final String GUI_FACTORY = "com.github.lunatrius.ingameinfo.client.gui.GuiFactory"; public static Logger logger = null; }
Added default logger.
src/main/java/com/github/lunatrius/ingameinfo/reference/Reference.java
Added default logger.
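The change replaces the null Logger field with one obtained eagerly from Log4j 2's LogManager, so callers can log without null checks or lazy initialization. A brief usage sketch (the message text is illustrative):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggerDemo {
    private static final Logger logger = LogManager.getLogger("InGameInfoXML");

    public static void main(String[] args) {
        // The logger is assigned eagerly, so no null check is needed at the call site.
        logger.info("InGame Info XML initialized");
    }
}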
Java
mit
8042bec9eae33ed168fa198b9be8d066be21d4ba
0
Rift-Runners/ConsULTIMATE,Rift-Runners/ConsULTIMATE,Rift-Runners/ConsULTIMATE
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.riftrunners.consultimate.model.entity; import java.io.Serializable; import java.util.Date; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.Table; /** * * @author Guilherme */ @Entity @Table(name = "transacao") public class Transacao implements Serializable{ @Id @GeneratedValue(strategy = GenerationType.AUTO) private Long id; @ManyToOne(targetEntity = Cliente.class, cascade = CascadeType.REFRESH) @JoinColumn(name = "cliente_id") private Cliente cliente; @ManyToOne(targetEntity = Consultor.class, cascade = CascadeType.REFRESH) @JoinColumn(name = "consultor_id") private Consultor consultor; private Double valor; private Integer horas; @Column(name="data_da_compra") private Date dataDaCompra; public Transacao(Cliente cliente, Consultor consultor, Double valor, Integer horas) { this.cliente = cliente; this.consultor = consultor; this.valor = valor; this.horas = horas; this.dataDaCompra = new Date(); } public Cliente getCliente() { return cliente; } public void setCliente(Cliente cliente) { this.cliente = cliente; } public Consultor getConsultor() { return consultor; } public void setConsultor(Consultor consultor) { this.consultor = consultor; } public Double getValor() { return valor; } public void setValor(Double valor) { this.valor = valor; } public Integer getHoras() { return horas; } public void setHoras(Integer horas) { this.horas = horas; } public Date getDataDaCompra() { return dataDaCompra; } public void setDataDaCompra(Date dataDaCompra) { this.dataDaCompra = dataDaCompra; } }
src/main/java/com/riftrunners/consultimate/model/entity/Transacao.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.riftrunners.consultimate.model.entity; import java.util.Date; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; /** * * @author Guilherme */ public class Transacao { @ManyToOne(targetEntity = Cliente.class, cascade = CascadeType.REFRESH) @JoinColumn(name = "cliente_id") private Cliente cliente; @ManyToOne(targetEntity = Consultor.class, cascade = CascadeType.REFRESH) @JoinColumn(name = "consultor_id") private Consultor consultor; private Double valor; private Integer horas; @Column(name="data_da_compra") private Date dataDaCompra; public Transacao(Cliente cliente, Consultor consultor, Double valor, Integer horas) { this.cliente = cliente; this.consultor = consultor; this.valor = valor; this.horas = horas; this.dataDaCompra = new Date(); } public Cliente getCliente() { return cliente; } public void setCliente(Cliente cliente) { this.cliente = cliente; } public Consultor getConsultor() { return consultor; } public void setConsultor(Consultor consultor) { this.consultor = consultor; } public Double getValor() { return valor; } public void setValor(Double valor) { this.valor = valor; } public Integer getHoras() { return horas; } public void setHoras(Integer horas) { this.horas = horas; } public Date getDataDaCompra() { return dataDaCompra; } public void setDataDaCompra(Date dataDaCompra) { this.dataDaCompra = dataDaCompra; } }
Transaction connected to the database (Consultor and Cliente ids as FKs)
src/main/java/com/riftrunners/consultimate/model/entity/Transacao.java
Transaction connected to the database (Consultor and Cliente ids as FKs)
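With @Entity, @Table, @Id, and @GeneratedValue in place, Transacao can be persisted through a standard JPA EntityManager. A minimal sketch of that flow; the persistence-unit name is a placeholder and Cliente and Consultor are assumed to have accessible no-arg constructors. Note also that JPA providers generally require a no-arg constructor on the entity itself, which this revision of Transacao does not yet declare:

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

public class TransacaoDemo {
    public static void main(String[] args) {
        // "consultimatePU" is a placeholder persistence-unit name, not taken from the project.
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("consultimatePU");
        EntityManager em = emf.createEntityManager();
        em.getTransaction().begin();
        // Cliente and Consultor are assumed to have accessible no-arg constructors here.
        Transacao transacao = new Transacao(new Cliente(), new Consultor(), 150.0, 2);
        em.persist(transacao); // the id is generated by the provider (GenerationType.AUTO)
        em.getTransaction().commit();
        em.close();
        emf.close();
    }
}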
Java
mit
21505971b4f486dbfdbe1703c131054ce5119c67
0
alkammar/morkim
package lib.morkim.mfw.ui; import android.content.Context; import java.util.Observable; import java.util.Observer; import lib.morkim.mfw.app.AppContext; import lib.morkim.mfw.domain.Model; import lib.morkim.mfw.usecase.UseCase; import lib.morkim.mfw.usecase.UseCaseProgress; import lib.morkim.mfw.usecase.UseCaseResult; import lib.morkim.mfw.usecase.UseCaseStateListener; public abstract class Controller extends Observable implements Observer, UseCaseStateListener { protected Viewable viewable; private AppContext appContext; private boolean isRegisteredToBackgroundData; public Controller(Viewable viewable) { this.viewable = viewable; appContext = createContext(); executeInitializationTask(); } protected AppContext createContext() { return viewable.getMorkimContext(); } protected void executeInitializationTask() {} protected AppContext getAppContext() { return appContext; } protected void unregisterBackgroundData() {} protected Model getModel() { return appContext.getModel(); } @Override public void update(Observable observable, Object data) {} @Override public void onUseCaseStart(UseCase useCase) {} @Override public void onUseCaseUpdate(UseCaseProgress response) {} @Override public void onUseCaseComplete(UseCaseResult response) {} @Override public void onUseCaseCancel() {} protected void finish() { viewable.finish(); } protected void keepScreenOn(boolean keepOn) { viewable.keepScreenOn(keepOn); } public void onDialogPositive(String tag) { } public void onDialogNegative(String tag) { } Viewable getViewable() { return viewable; } protected Context getContext() { return viewable.getContext(); } }
app/src/main/java/lib/morkim/mfw/ui/Controller.java
package lib.morkim.mfw.ui; import android.content.Context; import java.util.Observable; import java.util.Observer; import lib.morkim.mfw.app.AppContext; import lib.morkim.mfw.domain.Model; import lib.morkim.mfw.usecase.UseCase; import lib.morkim.mfw.usecase.UseCaseProgress; import lib.morkim.mfw.usecase.UseCaseResult; import lib.morkim.mfw.usecase.UseCaseStateListener; public abstract class Controller implements Observer, UseCaseStateListener { protected Viewable viewable; private AppContext appContext; private boolean isRegisteredToBackgroundData; public Controller(Viewable viewable) { this.viewable = viewable; appContext = createContext(); executeInitializationTask(); } protected AppContext createContext() { return viewable.getMorkimContext(); } protected void executeInitializationTask() {} protected AppContext getAppContext() { return appContext; } protected void unregisterBackgroundData() {} protected Model getModel() { return appContext.getModel(); } @Override public void update(Observable observable, Object data) {} @Override public void onUseCaseStart(UseCase useCase) {} @Override public void onUseCaseUpdate(UseCaseProgress response) {} @Override public void onUseCaseComplete(UseCaseResult response) {} @Override public void onUseCaseCancel() {} protected void finish() { viewable.finish(); } protected void keepScreenOn(boolean keepOn) { viewable.keepScreenOn(keepOn); } public void onDialogPositive(String tag) { } public void onDialogNegative(String tag) { } Viewable getViewable() { return viewable; } protected Context getContext() { return viewable.getContext(); } }
Remove view model in presenter
app/src/main/java/lib/morkim/mfw/ui/Controller.java
Remove view model in presenter
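Alongside removing the view model, this revision makes Controller itself extend java.util.Observable while still implementing Observer, so the controller can notify its own observers directly. The standard notification idiom looks like the sketch below (class names and the payload are illustrative); java.util.Observable was deprecated later, in Java 9, but was the conventional choice for this codebase's era:

import java.util.Observable;
import java.util.Observer;

public class ObservableDemo {
    static class DemoController extends Observable {
        void onModelChanged(String event) {
            setChanged();           // mark this Observable as changed...
            notifyObservers(event); // ...then push the payload to every registered observer
        }
    }

    public static void main(String[] args) {
        DemoController controller = new DemoController();
        Observer view = (observable, data) -> System.out.println("view received: " + data);
        controller.addObserver(view);
        controller.onModelChanged("model updated");
    }
}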
Java
mit
2430a885c3c55be351ea20309a6b71a4d41289c7
0
jvoegele/pulse,fluxroot/pulse,jvoegele/pulse,fluxroot/pulse,jvoegele/pulse
/* * Copyright 2013-2014 the original author or authors. * * This file is part of Pulse Chess. * * Pulse Chess is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Pulse Chess is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Pulse Chess. If not, see <http://www.gnu.org/licenses/>. */ package com.fluxchess.pulse; public final class MoveGenerator { // Move deltas public static final int[][] moveDeltaPawn = { {Square.deltaN, Square.deltaNE, Square.deltaNW}, // Color.WHITE {Square.deltaS, Square.deltaSE, Square.deltaSW} // Color.BLACK }; public static final int[] moveDeltaKnight = { Square.deltaN + Square.deltaN + Square.deltaE, Square.deltaN + Square.deltaN + Square.deltaW, Square.deltaN + Square.deltaE + Square.deltaE, Square.deltaN + Square.deltaW + Square.deltaW, Square.deltaS + Square.deltaS + Square.deltaE, Square.deltaS + Square.deltaS + Square.deltaW, Square.deltaS + Square.deltaE + Square.deltaE, Square.deltaS + Square.deltaW + Square.deltaW }; public static final int[] moveDeltaBishop = { Square.deltaNE, Square.deltaNW, Square.deltaSE, Square.deltaSW }; public static final int[] moveDeltaRook = { Square.deltaN, Square.deltaE, Square.deltaS, Square.deltaW }; public static final int[] moveDeltaQueen = { Square.deltaN, Square.deltaE, Square.deltaS, Square.deltaW, Square.deltaNE, Square.deltaNW, Square.deltaSE, Square.deltaSW }; public static final int[] moveDeltaKing = { Square.deltaN, Square.deltaE, Square.deltaS, Square.deltaW, Square.deltaNE, Square.deltaNW, Square.deltaSE, Square.deltaSW }; // We will store a MoveGenerator for each ply so we don't have to create them // in search. (which is expensive) private static final MoveGenerator[] moveGenerators = new MoveGenerator[Search.MAX_HEIGHT]; // We will use a staged move generation so we can easily extend it with // other features like transposition tables. private static final State[] mainStates = {State.BEGIN, State.MAIN, State.END}; private static final State[] quiescentStates = {State.BEGIN, State.QUIESCENT, State.END}; private Board board = null; private boolean isCheck = false; private State[] states = null; private int currentStateIndex = 0; private final MoveList moveList = new MoveList(); private int currentMoveIndex = 0; private static enum State { BEGIN, MAIN, QUIESCENT, END } static { for (int i = 0; i < Search.MAX_HEIGHT; ++i) { moveGenerators[i] = new MoveGenerator(); } } public static MoveGenerator getMoveGenerator(Board board, int depth, int height, boolean isCheck) { assert board != null; assert height >= 0 && height < Search.MAX_HEIGHT; MoveGenerator moveGenerator = moveGenerators[height]; moveGenerator.board = board; moveGenerator.isCheck = isCheck; moveGenerator.currentStateIndex = 0; moveGenerator.moveList.size = 0; moveGenerator.currentMoveIndex = 0; if (depth > 0) { moveGenerator.states = mainStates; } else { moveGenerator.states = quiescentStates; } return moveGenerator; } private MoveGenerator() { } /** * Returns the next legal move. We will go through our states and generate * the appropriate moves for the current state. 
* * @return the next legal move, or Move.NOMOVE if there's is no next move. */ public int next() { while (true) { // Check whether we have any move in the list if (currentMoveIndex < moveList.size) { int move = moveList.entries[currentMoveIndex++].move; switch (states[currentStateIndex]) { case MAIN: // Discard all non-legal moves if (!isLegal(move)) { continue; } break; case QUIESCENT: // Discard all non-legal moves. If not in check return only capturing moves. if (!isLegal(move) || (!isCheck && Move.getTargetPiece(move) == Piece.NOPIECE)) { continue; } break; default: assert false : states[currentStateIndex]; break; } return move; } // If we don't have any move in the list, lets generate the moves for the // next state. ++currentStateIndex; currentMoveIndex = 0; moveList.size = 0; // We simply generate all moves at once here. However we could also // generate capturing moves first and then all non-capturing moves. switch (states[currentStateIndex]) { case MAIN: addDefaultMoves(moveList); if (!isCheck) { int square = Bitboard.next(board.kings[board.activeColor].squares); addCastlingMoves(moveList, square); } moveList.rateFromMVVLVA(); moveList.sort(); break; case QUIESCENT: addDefaultMoves(moveList); moveList.rateFromMVVLVA(); moveList.sort(); break; case END: return Move.NOMOVE; default: assert false; break; } } } private void addDefaultMoves(MoveList list) { assert list != null; int activeColor = board.activeColor; for (long squares = board.pawns[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addPawnMoves(list, square); } for (long squares = board.knights[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaKnight); } for (long squares = board.bishops[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaBishop); } for (long squares = board.rooks[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaRook); } for (long squares = board.queens[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaQueen); } int square = Bitboard.next(board.kings[activeColor].squares); addMoves(list, square, moveDeltaKing); } private void addMoves(MoveList list, int originSquare, int[] moveDelta) { assert list != null; assert Square.isValid(originSquare); assert moveDelta != null; int originPiece = board.board[originSquare]; assert Piece.isValid(originPiece); boolean sliding = Piece.Type.isSliding(Piece.getType(originPiece)); int oppositeColor = Color.opposite(Piece.getColor(originPiece)); for (int delta : moveDelta) { int targetSquare = originSquare + delta; while (Square.isLegal(targetSquare)) { int targetPiece = board.board[targetSquare]; if (targetPiece == Piece.NOPIECE) { list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, originSquare, targetSquare, originPiece, Piece.NOPIECE, Piece.Type.NOTYPE); if (!sliding) { break; } targetSquare += delta; } else { if (Piece.getColor(targetPiece) == oppositeColor && Piece.getType(targetPiece) != Piece.Type.KING) { list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, originSquare, targetSquare, originPiece, targetPiece, Piece.Type.NOTYPE); } break; } } } } private void addPawnMoves(MoveList list, int pawnSquare) { assert list != null; assert Square.isValid(pawnSquare); int pawnPiece = 
board.board[pawnSquare]; assert Piece.isValid(pawnPiece); assert Piece.getType(pawnPiece) == Piece.Type.PAWN; int pawnColor = Piece.getColor(pawnPiece); // Generate only capturing moves first (i = 1) for (int i = 1; i < moveDeltaPawn[pawnColor].length; ++i) { int delta = moveDeltaPawn[pawnColor][i]; int targetSquare = pawnSquare + delta; if (Square.isLegal(targetSquare)) { int targetPiece = board.board[targetSquare]; if (targetPiece != Piece.NOPIECE) { if (Piece.getColor(targetPiece) == Color.opposite(pawnColor) && Piece.getType(targetPiece) != Piece.Type.KING) { // Capturing move if ((pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R8) || (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R1)) { // Pawn promotion capturing move list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.QUEEN); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.ROOK); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.BISHOP); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.KNIGHT); } else { // Normal capturing move list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.NOTYPE); } } } else if (targetSquare == board.enPassant) { // En passant move assert (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R3) || (pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R6); int captureSquare = targetSquare + (pawnColor == Color.WHITE ? 
Square.deltaS : Square.deltaN); targetPiece = board.board[captureSquare]; assert Piece.getType(targetPiece) == Piece.Type.PAWN; assert Piece.getColor(targetPiece) == Color.opposite(pawnColor); list.entries[list.size++].move = Move.valueOf(Move.Type.ENPASSANT, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.NOTYPE); } } } // Generate non-capturing moves int delta = moveDeltaPawn[pawnColor][0]; // Move one rank forward int targetSquare = pawnSquare + delta; if (Square.isLegal(targetSquare) && board.board[targetSquare] == Piece.NOPIECE) { if ((pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R8) || (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R1)) { // Pawn promotion move list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.QUEEN); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.ROOK); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.BISHOP); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.KNIGHT); } else { // Normal move list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.NOTYPE); // Move another rank forward targetSquare += delta; if (Square.isLegal(targetSquare) && board.board[targetSquare] == Piece.NOPIECE) { if ((pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R4) || (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R5)) { // Pawn double move list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNDOUBLE, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } } } } } private void addCastlingMoves(MoveList list, int kingSquare) { assert list != null; assert Square.isValid(kingSquare); int kingPiece = board.board[kingSquare]; assert Piece.isValid(kingPiece); assert Piece.getType(kingPiece) == Piece.Type.KING; if (Piece.getColor(kingPiece) == Color.WHITE) { // Do not test g1 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.WHITE][Castling.KINGSIDE] != File.NOFILE && board.board[Square.f1] == Piece.NOPIECE && board.board[Square.g1] == Piece.NOPIECE && !board.isAttacked(Square.f1, Color.BLACK)) { assert board.board[Square.e1] == Piece.WHITEKING; assert board.board[Square.h1] == Piece.WHITEROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.g1, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } // Do not test c1 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.WHITE][Castling.QUEENSIDE] != File.NOFILE && board.board[Square.b1] == Piece.NOPIECE && board.board[Square.c1] == Piece.NOPIECE && board.board[Square.d1] == Piece.NOPIECE && !board.isAttacked(Square.d1, Color.BLACK)) { assert board.board[Square.e1] == Piece.WHITEKING; assert board.board[Square.a1] == Piece.WHITEROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.c1, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } } else { // Do not test g8 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.BLACK][Castling.KINGSIDE] != File.NOFILE && board.board[Square.f8] == Piece.NOPIECE && board.board[Square.g8] == Piece.NOPIECE && !board.isAttacked(Square.f8, 
Color.WHITE)) { assert board.board[Square.e8] == Piece.BLACKKING; assert board.board[Square.h8] == Piece.BLACKROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.g8, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } // Do not test c8 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.BLACK][Castling.QUEENSIDE] != File.NOFILE && board.board[Square.b8] == Piece.NOPIECE && board.board[Square.c8] == Piece.NOPIECE && board.board[Square.d8] == Piece.NOPIECE && !board.isAttacked(Square.d8, Color.WHITE)) { assert board.board[Square.e8] == Piece.BLACKKING; assert board.board[Square.a8] == Piece.BLACKROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.c8, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } } } private boolean isLegal(int move) { int activeColor = board.activeColor; board.makeMove(move); boolean isAttacked = board.isAttacked(Bitboard.next(board.kings[activeColor].squares), Color.opposite(activeColor)); board.undoMove(move); return !isAttacked; } }
src/main/java/com/fluxchess/pulse/MoveGenerator.java
/* * Copyright 2013-2014 the original author or authors. * * This file is part of Pulse Chess. * * Pulse Chess is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Pulse Chess is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Pulse Chess. If not, see <http://www.gnu.org/licenses/>. */ package com.fluxchess.pulse; public final class MoveGenerator { // Move deltas public static final int[][] moveDeltaPawn = { {Square.deltaN, Square.deltaNE, Square.deltaNW}, // Color.WHITE {Square.deltaS, Square.deltaSE, Square.deltaSW} // Color.BLACK }; public static final int[] moveDeltaKnight = { Square.deltaN + Square.deltaN + Square.deltaE, Square.deltaN + Square.deltaN + Square.deltaW, Square.deltaN + Square.deltaE + Square.deltaE, Square.deltaN + Square.deltaW + Square.deltaW, Square.deltaS + Square.deltaS + Square.deltaE, Square.deltaS + Square.deltaS + Square.deltaW, Square.deltaS + Square.deltaE + Square.deltaE, Square.deltaS + Square.deltaW + Square.deltaW }; public static final int[] moveDeltaBishop = { Square.deltaNE, Square.deltaNW, Square.deltaSE, Square.deltaSW }; public static final int[] moveDeltaRook = { Square.deltaN, Square.deltaE, Square.deltaS, Square.deltaW }; public static final int[] moveDeltaQueen = { Square.deltaN, Square.deltaE, Square.deltaS, Square.deltaW, Square.deltaNE, Square.deltaNW, Square.deltaSE, Square.deltaSW }; public static final int[] moveDeltaKing = { Square.deltaN, Square.deltaE, Square.deltaS, Square.deltaW, Square.deltaNE, Square.deltaNW, Square.deltaSE, Square.deltaSW }; // We will store a MoveGenerator for each ply so we don't have to create them // in search. (which is expensive) private static final MoveGenerator[] moveGenerators = new MoveGenerator[Search.MAX_HEIGHT]; // We will use a staged move generation so we can easily extend it with // other features like transposition tables. private static final State[] mainStates = {State.BEGIN, State.MAIN, State.END}; private static final State[] quiescentStates = {State.BEGIN, State.QUIESCENT, State.END}; private Board board = null; private boolean isCheck = false; private State[] states = null; private int currentStateIndex = 0; private final MoveList moveList = new MoveList(); private int currentMoveIndex = 0; private static enum State { BEGIN, MAIN, QUIESCENT, END } static { for (int i = 0; i < Search.MAX_HEIGHT; ++i) { moveGenerators[i] = new MoveGenerator(); } } public static MoveGenerator getMoveGenerator(Board board, int depth, int height, boolean isCheck) { assert board != null; assert height >= 0 && height <= Search.MAX_HEIGHT; MoveGenerator moveGenerator = moveGenerators[height]; moveGenerator.board = board; moveGenerator.isCheck = isCheck; moveGenerator.currentStateIndex = 0; moveGenerator.moveList.size = 0; moveGenerator.currentMoveIndex = 0; if (depth > 0) { moveGenerator.states = mainStates; } else { moveGenerator.states = quiescentStates; } return moveGenerator; } private MoveGenerator() { } /** * Returns the next legal move. We will go through our states and generate * the appropriate moves for the current state. 
* * @return the next legal move, or Move.NOMOVE if there's is no next move. */ public int next() { while (true) { // Check whether we have any move in the list if (currentMoveIndex < moveList.size) { int move = moveList.entries[currentMoveIndex++].move; switch (states[currentStateIndex]) { case MAIN: // Discard all non-legal moves if (!isLegal(move)) { continue; } break; case QUIESCENT: // Discard all non-legal moves. If not in check return only capturing moves. if (!isLegal(move) || (!isCheck && Move.getTargetPiece(move) == Piece.NOPIECE)) { continue; } break; default: assert false : states[currentStateIndex]; break; } return move; } // If we don't have any move in the list, lets generate the moves for the // next state. ++currentStateIndex; currentMoveIndex = 0; moveList.size = 0; // We simply generate all moves at once here. However we could also // generate capturing moves first and then all non-capturing moves. switch (states[currentStateIndex]) { case MAIN: addDefaultMoves(moveList); if (!isCheck) { int square = Bitboard.next(board.kings[board.activeColor].squares); addCastlingMoves(moveList, square); } moveList.rateFromMVVLVA(); moveList.sort(); break; case QUIESCENT: addDefaultMoves(moveList); moveList.rateFromMVVLVA(); moveList.sort(); break; case END: return Move.NOMOVE; default: assert false; break; } } } private void addDefaultMoves(MoveList list) { assert list != null; int activeColor = board.activeColor; for (long squares = board.pawns[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addPawnMoves(list, square); } for (long squares = board.knights[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaKnight); } for (long squares = board.bishops[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaBishop); } for (long squares = board.rooks[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaRook); } for (long squares = board.queens[activeColor].squares; squares != 0; squares &= squares - 1) { int square = Bitboard.next(squares); addMoves(list, square, moveDeltaQueen); } int square = Bitboard.next(board.kings[activeColor].squares); addMoves(list, square, moveDeltaKing); } private void addMoves(MoveList list, int originSquare, int[] moveDelta) { assert list != null; assert Square.isValid(originSquare); assert moveDelta != null; int originPiece = board.board[originSquare]; assert Piece.isValid(originPiece); boolean sliding = Piece.Type.isSliding(Piece.getType(originPiece)); int oppositeColor = Color.opposite(Piece.getColor(originPiece)); for (int delta : moveDelta) { int targetSquare = originSquare + delta; while (Square.isLegal(targetSquare)) { int targetPiece = board.board[targetSquare]; if (targetPiece == Piece.NOPIECE) { list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, originSquare, targetSquare, originPiece, Piece.NOPIECE, Piece.Type.NOTYPE); if (!sliding) { break; } targetSquare += delta; } else { if (Piece.getColor(targetPiece) == oppositeColor && Piece.getType(targetPiece) != Piece.Type.KING) { list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, originSquare, targetSquare, originPiece, targetPiece, Piece.Type.NOTYPE); } break; } } } } private void addPawnMoves(MoveList list, int pawnSquare) { assert list != null; assert Square.isValid(pawnSquare); int pawnPiece = 
board.board[pawnSquare]; assert Piece.isValid(pawnPiece); assert Piece.getType(pawnPiece) == Piece.Type.PAWN; int pawnColor = Piece.getColor(pawnPiece); // Generate only capturing moves first (i = 1) for (int i = 1; i < moveDeltaPawn[pawnColor].length; ++i) { int delta = moveDeltaPawn[pawnColor][i]; int targetSquare = pawnSquare + delta; if (Square.isLegal(targetSquare)) { int targetPiece = board.board[targetSquare]; if (targetPiece != Piece.NOPIECE) { if (Piece.getColor(targetPiece) == Color.opposite(pawnColor) && Piece.getType(targetPiece) != Piece.Type.KING) { // Capturing move if ((pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R8) || (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R1)) { // Pawn promotion capturing move list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.QUEEN); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.ROOK); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.BISHOP); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.KNIGHT); } else { // Normal capturing move list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.NOTYPE); } } } else if (targetSquare == board.enPassant) { // En passant move assert (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R3) || (pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R6); int captureSquare = targetSquare + (pawnColor == Color.WHITE ? 
Square.deltaS : Square.deltaN); targetPiece = board.board[captureSquare]; assert Piece.getType(targetPiece) == Piece.Type.PAWN; assert Piece.getColor(targetPiece) == Color.opposite(pawnColor); list.entries[list.size++].move = Move.valueOf(Move.Type.ENPASSANT, pawnSquare, targetSquare, pawnPiece, targetPiece, Piece.Type.NOTYPE); } } } // Generate non-capturing moves int delta = moveDeltaPawn[pawnColor][0]; // Move one rank forward int targetSquare = pawnSquare + delta; if (Square.isLegal(targetSquare) && board.board[targetSquare] == Piece.NOPIECE) { if ((pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R8) || (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R1)) { // Pawn promotion move list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.QUEEN); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.ROOK); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.BISHOP); list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNPROMOTION, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.KNIGHT); } else { // Normal move list.entries[list.size++].move = Move.valueOf(Move.Type.NORMAL, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.NOTYPE); // Move another rank forward targetSquare += delta; if (Square.isLegal(targetSquare) && board.board[targetSquare] == Piece.NOPIECE) { if ((pawnColor == Color.WHITE && Square.getRank(targetSquare) == Rank.R4) || (pawnColor == Color.BLACK && Square.getRank(targetSquare) == Rank.R5)) { // Pawn double move list.entries[list.size++].move = Move.valueOf(Move.Type.PAWNDOUBLE, pawnSquare, targetSquare, pawnPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } } } } } private void addCastlingMoves(MoveList list, int kingSquare) { assert list != null; assert Square.isValid(kingSquare); int kingPiece = board.board[kingSquare]; assert Piece.isValid(kingPiece); assert Piece.getType(kingPiece) == Piece.Type.KING; if (Piece.getColor(kingPiece) == Color.WHITE) { // Do not test g1 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.WHITE][Castling.KINGSIDE] != File.NOFILE && board.board[Square.f1] == Piece.NOPIECE && board.board[Square.g1] == Piece.NOPIECE && !board.isAttacked(Square.f1, Color.BLACK)) { assert board.board[Square.e1] == Piece.WHITEKING; assert board.board[Square.h1] == Piece.WHITEROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.g1, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } // Do not test c1 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.WHITE][Castling.QUEENSIDE] != File.NOFILE && board.board[Square.b1] == Piece.NOPIECE && board.board[Square.c1] == Piece.NOPIECE && board.board[Square.d1] == Piece.NOPIECE && !board.isAttacked(Square.d1, Color.BLACK)) { assert board.board[Square.e1] == Piece.WHITEKING; assert board.board[Square.a1] == Piece.WHITEROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.c1, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } } else { // Do not test g8 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.BLACK][Castling.KINGSIDE] != File.NOFILE && board.board[Square.f8] == Piece.NOPIECE && board.board[Square.g8] == Piece.NOPIECE && !board.isAttacked(Square.f8, 
Color.WHITE)) { assert board.board[Square.e8] == Piece.BLACKKING; assert board.board[Square.h8] == Piece.BLACKROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.g8, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } // Do not test c8 whether it is attacked as we will test it in isLegal() if (board.colorCastling[Color.BLACK][Castling.QUEENSIDE] != File.NOFILE && board.board[Square.b8] == Piece.NOPIECE && board.board[Square.c8] == Piece.NOPIECE && board.board[Square.d8] == Piece.NOPIECE && !board.isAttacked(Square.d8, Color.WHITE)) { assert board.board[Square.e8] == Piece.BLACKKING; assert board.board[Square.a8] == Piece.BLACKROOK; list.entries[list.size++].move = Move.valueOf(Move.Type.CASTLING, kingSquare, Square.c8, kingPiece, Piece.NOPIECE, Piece.Type.NOTYPE); } } } private boolean isLegal(int move) { int activeColor = board.activeColor; board.makeMove(move); boolean isAttacked = board.isAttacked(Bitboard.next(board.kings[activeColor].squares), Color.opposite(activeColor)); board.undoMove(move); return !isAttacked; } }
Fix Search.MAX_HEIGHT bound usage
src/main/java/com/fluxchess/pulse/MoveGenerator.java
Fix Search.MAX_HEIGHT bound usage
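The one-line change in this commit fixes an off-by-one bound: height indexes the moveGenerators array, which has exactly Search.MAX_HEIGHT slots, so the old guard height <= Search.MAX_HEIGHT would let an access one past the end through. A minimal self-contained sketch of the corrected guard; the MAX_HEIGHT value below is hypothetical and only stands in for Pulse's constant:

public final class HeightBoundSketch {
    // Hypothetical stand-in for Search.MAX_HEIGHT; the real value is not shown in this record.
    static final int MAX_HEIGHT = 256;
    static final Object[] generators = new Object[MAX_HEIGHT];

    static Object getGenerator(int height) {
        // Valid indices run 0 .. MAX_HEIGHT - 1, hence '<' rather than '<='.
        assert height >= 0 && height < MAX_HEIGHT;
        return generators[height];
    }

    public static void main(String[] args) {
        getGenerator(MAX_HEIGHT - 1); // last valid slot
        // getGenerator(MAX_HEIGHT); // would pass the old '<=' check and then throw
        //                           // ArrayIndexOutOfBoundsException on the array access
    }
}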
Java
mit
f170503deff19fcd3b0a2e9df7242a51a6625144
0
fiveham/Sudoku_Solver
package common; import java.util.*; public class Universe<E> { private final List<E> ie; private final Map<E,Integer> ei; public Universe(Collection<? extends E> c){ this.ie = new ArrayList<E>(c); this.ei = new HashMap<>(); // Bound the loop by the backing list; ei is still empty at this point. for(int i=0; i<ie.size(); ++i){ ei.put(ie.get(i), i); } } public boolean contains(Object o){ return ei.containsKey(o); } public int size(){ return ie.size(); } public E get(int i){ return ie.get(i); } public int index(E e){ return ei.get(e); } public BackedSet<E> set(Collection<? extends E> c){ return new BackedSet<E>(this,c); } public boolean equals(Object o){ if(o instanceof Universe<?>){ Universe<?> u = (Universe<?>) o; return ie.equals(u.ie) && ei.equals(u.ei); } return false; } /** * <p>A convenience method returning an empty BackedSet backed * by this Universe.</p> * @return an empty BackedSet backed by this Universe */ public BackedSet<E> back(){ return new BackedSet<>(this); } /** * <p>A convenience method returning a BackedSet backed by this * Universe, containing the contents of {@code c}.</p> * @return a BackedSet backed by this Universe, containing the * contents of {@code c} */ public BackedSet<E> back(Collection<? extends E> c){ return new BackedSet<>(this, c); } }
src/common/Universe.java
package common; import java.util.*; public class Universe<E> { private final List<E> ie; private final Map<E,Integer> ei; public Universe(Collection<? extends E> c){ this.ie = new ArrayList<E>(c); this.ei = new HashMap<>(); for(int i=0; i<ei.size(); ++i){ ei.put(ie.get(i), i); } } public boolean contains(Object o){ return ei.containsKey(o); } public int size(){ return ie.size(); } public E get(int i){ return ie.get(i); } public int index(E e){ return ei.get(e); } public BackedSet<E> set(Collection<? extends E> c){ return new BackedSet<E>(this,c); } public boolean equals(Object o){ if(o instanceof Universe<?>){ Universe<?> u = (Universe<?>) o; return ie.equals(u.ie) && ei.equals(u.ei); } return false; } /** * <p>A convenience method returning an empty BackedSet backed * by this Universe.</p> * @return an empty BackedSet backed by this Universe */ public BackedSet<E> back(){ return new BackedSet<>(this); } /** * <p>A convenience method returning a BackedSet backed by this * Universe. containing the contents of {@code c}.</p> * @returna BackedSet backed by this Universe. containing the * contents of {@code c} */ public BackedSet<E> back(Collection<? extends E> c){ return new BackedSet<>(this, c); } }
fix typo
src/common/Universe.java
fix typo
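Beyond the javadoc typo this commit addresses, the constructor's index map is only built correctly when the loop bound comes from the backing list (ie), not from the map being filled (ei), which is empty when the loop starts. A standalone sketch of the list-plus-index-map pattern and its round-trip property; the element values are hypothetical:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IndexMapSketch {
    public static void main(String[] args) {
        List<String> ie = Arrays.asList("a", "b", "c");
        Map<String, Integer> ei = new HashMap<>();
        // Same shape as Universe's constructor: bound the loop by the list, not the empty map.
        for (int i = 0; i < ie.size(); ++i) {
            ei.put(ie.get(i), i);
        }
        // Round trip: get(index(e)) must return e for every element.
        for (String e : ie) {
            if (!ie.get(ei.get(e)).equals(e)) {
                throw new AssertionError(e);
            }
        }
        System.out.println(ei); // {a=0, b=1, c=2}
    }
}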
Java
mit
d04f1b6c0fc79a51b3089a12f85ea72ee7a3b26c
0
if-056-java/animalsRepo
package com.animals.app.controller.resource; import java.io.IOException; import java.sql.Date; import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer; import javax.annotation.security.PermitAll; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import org.apache.commons.codec.binary.Base64; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.scribe.builder.ServiceBuilder; import org.scribe.builder.api.Google2Api; import org.scribe.model.OAuthRequest; import org.scribe.model.Token; import org.scribe.model.Verb; import org.scribe.model.Verifier; import org.scribe.oauth.OAuthService; import com.animals.app.domain.User; import com.animals.app.domain.UserRole; import com.animals.app.domain.UserType; import com.animals.app.repository.Impl.UserRepositoryImpl; /** * Created by 41X on 8/16/2015. */ @Path("account") @PermitAll public class OAuthAuthorizationResource { private final Response BAD_REQUEST = Response.status(Response.Status.BAD_REQUEST).build(); private final Response NOT_FOUND = Response.status(Response.Status.NOT_FOUND).build(); private final Response SERVER_ERROR = Response.status(Response.Status.INTERNAL_SERVER_ERROR).build(); private UserRepositoryImpl userRep = new UserRepositoryImpl(); // Google OAuth preferences private static final String PROTECTED_RESOURCE_URL = "https://www.googleapis.com/oauth2/v1/userinfo?alt=json"; private static final String SCOPE = "https://mail.google.com/ https://www.googleapis.com/auth/userinfo.email"; private static final Token EMPTY_TOKEN = null; private static final String apiKeyG = "1061082540782-02vuauouhb8v5caiavepvgkuuiv4t178.apps.googleusercontent.com"; private static final String apiSecretG = "rYsnWUSHf4S2z-LHM1oMocJT"; private static final String callbackUrlGPath = "webapi/account/login/google_token"; @GET @Path("login/google") //http://localhost:8080/webapi/account/login/google public Response googleLogin(@Context HttpServletRequest req) { //Define URLs and callback String pathAll = req.getRequestURL().toString(); String pathMain =pathAll.replace("webapi/account/login/google", ""); String callbackUrlG = pathMain + callbackUrlGPath; OAuthService service = null; try { service = new ServiceBuilder() .provider(Google2Api.class) .apiKey(apiKeyG) .apiSecret(apiSecretG) .callback(callbackUrlG) .scope(SCOPE) .offline(true) .build(); } catch (Exception e) { e.printStackTrace(); } if (service == null) { return Response.status(404).build(); } String authorizationUrl = service.getAuthorizationUrl(EMPTY_TOKEN); System.out.println("url - " + authorizationUrl); return Response.status(Response.Status.OK).entity(authorizationUrl).build(); } @GET @Path("login/google_token") //http://localhost:8080/webapi/account/login/google_token public Response getGoogleAccessToken(@QueryParam("code") String token, @QueryParam("error") String error, @Context HttpServletRequest req) { //Define URLs and callback String pathAll = req.getRequestURL().toString(); String pathMain =pathAll.replace("webapi/account/login/google_token", ""); String successURL = pathMain + "#/ua/user/profile"; String callbackUrlG =
pathMain + callbackUrlGPath; if(error!=null){ String entryUrl= pathMain + "/#/ua/user/login"; return Response.temporaryRedirect(UriBuilder.fromUri(entryUrl).build()).build(); } Verifier v = new Verifier(token); System.out.println("token - " + token); OAuthService service2 =null; try { service2 = new ServiceBuilder() .provider(Google2Api.class) .apiKey(apiKeyG) .apiSecret(apiSecretG) .callback(callbackUrlG) .scope(SCOPE) .offline(true) .build(); } catch (Exception e1) { e1.printStackTrace(); } Token accessToken = new Token("ACCESS_TOKEN", "REFRESH_TOKEN"); accessToken = service2.getAccessToken(EMPTY_TOKEN, v); String refreshGoogleToken = accessToken.getSecret(); String accessGoogleToken = accessToken.getToken(); //Request protected resource OAuthRequest request = new OAuthRequest(Verb.GET, PROTECTED_RESOURCE_URL); service2.signRequest(accessToken, request); org.scribe.model.Response response = request.send(); System.out.println(response.getCode()); System.out.println(response.getBody()); //JSON string from Google response String json = response.getBody(); //parse string String googleId=null; String name=null; String link=null; String email=null; try { JSONParser jsonParser = new JSONParser(); JSONObject jsonObject = (JSONObject) jsonParser.parse(json); name = (String) jsonObject.get("name"); System.out.println("The first name is: " + name); googleId = (String) jsonObject.get("id"); System.out.println("id is: " + googleId); link = (String) jsonObject.get("picture"); System.out.println("Link to photo: " + link); email = (String) jsonObject.get("email"); System.out.println("Email: " + email); } catch (ParseException e) { e.printStackTrace(); } catch (NullPointerException ex) { ex.printStackTrace(); } //getting userId from current session HttpSession session = req.getSession(true); //CASE 1: Editing user profile from MyCabinet. Check if session has parameters if(session.getAttribute("userId") != null){ //Check if a user exists with this googleId. If one exists, we can't join accounts - it will be an error. //ERROR - on login there would be two accounts with the same GoogleID User existUserWithGoogleId=null; try { existUserWithGoogleId = userRep.getByGoogleId(googleId); } catch (Exception e) { return SERVER_ERROR; } if (existUserWithGoogleId != null) { //add params to redirect URL to inform frontend that account is already in use //by another user String errorUrl= successURL + "?join=error"; return Response.temporaryRedirect(UriBuilder.fromUri(errorUrl).build()).build(); } int userId = Integer.parseInt((String)session.getAttribute("userId")); System.out.println(userId); //store the googleId and picture in the User fetched by userId User user=null; try { user = userRep.getById(userId); user.setGoogleId(googleId); user.setSocialPhoto(link); userRep.update(user); } catch (Exception e) { return SERVER_ERROR; } session.setAttribute("successMesage", "Successful joining Google account"); session.setAttribute("user", user); session.setAttribute("refreshGoogleToken", refreshGoogleToken); return Response.temporaryRedirect(UriBuilder.fromUri(successURL).build()).build(); } //CASE 2: Login to site. Session is not set. Find User by googleId //CASE 3: Registration. Session is not set. Create User with GoogleId and SocialPhoto //Check if a user exists with this googleId User user=null; try { user = userRep.getByGoogleId(googleId); } catch (Exception e) { return SERVER_ERROR; } if (user != null) { //Case 2 //creating Session for the found user.
Setting params System.out.println("creating session"); setUpSuccessSession(user, session, "success login with GoogleId"); session.setAttribute("refreshGoogleToken", refreshGoogleToken); //Entering the site with the Session return Response.temporaryRedirect(UriBuilder.fromUri(successURL).build()).build(); } //else CASE 3 //creating User to register User userToReg = new User(); String userLogin; if (name!=null && !name.isEmpty()) { userLogin = name; } else { userLogin = "unknown"; } userToReg.setName(userLogin); userToReg.setSocialLogin(userLogin); userToReg.setSurname("N/A"); userToReg.setEmail(email); userToReg.setActive(true); userToReg.setAddress("N/A"); userToReg.setPhone("N/A"); userToReg.setOrganizationInfo("N/A"); userToReg.setOrganizationName("N/A"); userToReg.setPassword(googleId); userToReg.setSocialPhoto(link); userToReg.setGoogleId(googleId); UserRole userRole = new UserRole(); userRole.setRole("гість"); /* "гість" is Ukrainian for "guest" */ userRole.setId(3); List<UserRole> list = new ArrayList<UserRole>(); list.add(userRole); userToReg.setUserRole(list); UserType userType = new UserType(); userType.setId(1); userToReg.setUserType(userType); Date currentDate = new Date(new java.util.Date().getTime()); System.out.println(currentDate); userToReg.setRegistrationDate(currentDate); //inserting user into DB try { userRep.insert(userToReg); } catch (Exception e) { return SERVER_ERROR; } //creating session setUpSuccessSession(userToReg, session, "successful Registration with GoogleId"); session.setAttribute("refreshGoogleToken", refreshGoogleToken); //session.setAttribute("user", userToReg); //Entering the site with the Session return Response.temporaryRedirect(UriBuilder.fromUri(successURL).build()).build(); } @GET @Path("login/google_login_direct") //http://localhost:8080/webapi/account/login/google_login_direct public Response directGoogleLoginWithOldAccessToken(@Context HttpServletRequest req, @QueryParam("code") String refreshGoogleToken) { //Define URLs and callback String pathAll = req.getRequestURL().toString(); String pathMain =pathAll.replace("webapi/account/login/google_login_direct", ""); String successURL = pathMain + "#/ua/user/profile"; String callbackUrlG = pathMain + callbackUrlGPath; System.out.println("Google refresh token - "+ refreshGoogleToken); //getting new access token with old refreshToken OAuthRequest request = new OAuthRequest(Verb.POST, "https://www.googleapis.com/oauth2/v3/token"); request.addBodyParameter("grant_type", "refresh_token"); request.addBodyParameter("refresh_token", refreshGoogleToken); // where accessToken is the Token object you want to refresh.
request.addBodyParameter("client_id", apiKeyG); request.addBodyParameter("client_secret", apiSecretG); org.scribe.model.Response response = request.send(); System.out.println(response.getCode()); //200 - success System.out.println(response.getBody()); //JSON response //JSON string from Google response String json = response.getBody(); //parse string String new_access_token=null; try { JSONParser jsonParser = new JSONParser(); JSONObject jsonObject = (JSONObject) jsonParser.parse(json); new_access_token = (String) jsonObject.get("access_token"); System.out.println("new accessToken is: " + new_access_token); } catch (ParseException e) { e.printStackTrace(); } catch (NullPointerException ex) { ex.printStackTrace(); } //New request to protected resource with new accessToken and old refreshToken OAuthService service2 =null; try { service2 = new ServiceBuilder() .provider(Google2Api.class) .apiKey(apiKeyG) .apiSecret(apiSecretG) .callback(callbackUrlG) .scope(SCOPE) .offline(true) .build(); } catch (Exception e1) { e1.printStackTrace(); } Token accessToken = new Token(new_access_token, refreshGoogleToken); //Request protected resource OAuthRequest request2 = new OAuthRequest(Verb.GET, PROTECTED_RESOURCE_URL); service2.signRequest(accessToken, request2); org.scribe.model.Response response2 = request2.send(); System.out.println(response2.getCode()); //200 - success System.out.println(response2.getBody()); //JSON response //JSON string from Google response String json2 = response2.getBody(); //parse string String googleId=null; try { JSONParser jsonParser = new JSONParser(); JSONObject jsonObject = (JSONObject) jsonParser.parse(json2); googleId = (String) jsonObject.get("id"); System.out.println("id is: " + googleId); } catch (ParseException e) { e.printStackTrace(); } catch (NullPointerException ex) { ex.printStackTrace(); } //Login to site. Session is not set. Find User by googleId HttpSession sessionNew = req.getSession(true); //Check if user exist by googleId User user=null; try { user = userRep.getByGoogleId(googleId); System.out.println(user); } catch (Exception e) { return SERVER_ERROR; } if (user == null) { return NOT_FOUND; } //creating Session for founded user. Setting params setUpSuccessSession(user, sessionNew, "success direct login with GoogleId"); sessionNew.setAttribute("refreshGoogleToken", refreshGoogleToken); return Response.status(Response.Status.OK).entity(successURL).build(); } private static void setUpSuccessSession(User user, HttpSession session, String success){ session.setAttribute("userName",user.getName()); session.setAttribute("userId",user.getId().toString()); session.setAttribute("userSurname",user.getSurname()); session.setAttribute("socialLogin",user.getSocialLogin()); session.setAttribute("userRoleId",user.getUserRole().get(0).getId().toString()); session.setAttribute("userRole",user.getUserRole().get(0).getRole()); session.setAttribute("successMesage", success); session.setAttribute("user", user); //creating string for accessToken String accessToken = (String)session.getId() + ":" + (String)session.getAttribute("userId"); System.out.println("decoded accesToken - " + accessToken); String accessTokenEncoded=null; try { byte[] encoded = Base64.encodeBase64(accessToken.getBytes()); accessTokenEncoded = new String(encoded, "UTF-8"); } catch (IOException e) { e.printStackTrace(); } System.out.println("encoded accessToken -" + accessTokenEncoded); session.setAttribute("accessToken", accessTokenEncoded); } }
AnimalWebApp/src/main/java/com/animals/app/controller/resource/OAuthAuthorizationResource.java
package com.animals.app.controller.resource; import java.io.IOException; import java.sql.Date; import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer; import javax.annotation.security.PermitAll; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import org.apache.commons.codec.binary.Base64; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.scribe.builder.ServiceBuilder; import org.scribe.builder.api.Google2Api; import org.scribe.model.OAuthRequest; import org.scribe.model.Token; import org.scribe.model.Verb; import org.scribe.model.Verifier; import org.scribe.oauth.OAuthService; import com.animals.app.domain.User; import com.animals.app.domain.UserRole; import com.animals.app.domain.UserType; import com.animals.app.repository.Impl.UserRepositoryImpl; /** * Created by 41X on 8/16/2015. */ @Path("account") @PermitAll public class OAuthAuthorizationResource { private final Response BAD_REQUEST = Response.status(Response.Status.BAD_REQUEST).build(); private final Response NOT_FOUND = Response.status(Response.Status.NOT_FOUND).build(); private final Response SERVER_ERROR = Response.status(Response.Status.INTERNAL_SERVER_ERROR).build(); private UserRepositoryImpl userRep = new UserRepositoryImpl(); // Google OAuth preferences private static final String PROTECTED_RESOURCE_URL = "https://www.googleapis.com/oauth2/v1/userinfo?alt=json"; private static final String SCOPE = "https://mail.google.com/ https://www.googleapis.com/auth/userinfo.email"; private static final Token EMPTY_TOKEN = null; private static final String apiKeyG = "1061082540782-02vuauouhb8v5caiavepvgkuuiv4t178.apps.googleusercontent.com"; private static final String apiSecretG = "rYsnWUSHf4S2z-LHM1oMocJT"; private static final String callbackUrlGPath = "webapi/account/login/google_token"; @GET @Path("login/google") //http:localhost:8080/webapi/account/login/google public Response googleLogin(@Context HttpServletRequest req) { String pathAll = req.getRequestURL().toString(); String pathMain =pathAll.replace("webapi/account/login/google", ""); System.out.println("pathMain - " + pathMain); String callbackUrlG = pathMain + callbackUrlGPath; System.out.println(callbackUrlG); OAuthService service = null; try { service = new ServiceBuilder() .provider(Google2Api.class) .apiKey(apiKeyG) .apiSecret(apiSecretG) .callback(callbackUrlG) .scope(SCOPE) .offline(true) .build(); } catch (Exception e) { e.printStackTrace(); } if (service == null) { return Response.status(404).build(); } String authorizationUrl = service.getAuthorizationUrl(EMPTY_TOKEN); System.out.println("url - " + authorizationUrl); return Response.status(Response.Status.OK).entity(authorizationUrl).build(); } @GET @Path("login/google_token") //http://localhost:8080/webapi/account/login/google_token public Response getGoogleAccessToken(@QueryParam("code") String token, @QueryParam("error") String error, @Context HttpServletRequest req) { String pathAll = req.getRequestURL().toString(); String pathMain =pathAll.replace("webapi/account/login/google_token", ""); System.out.println("pathMain - " + pathMain); 
String successURL = pathMain + "#/ua/user/profile"; System.out.println(successURL); String callbackUrlG = pathMain + callbackUrlGPath; System.out.println(callbackUrlG); if(error!=null){ String entryUrl= pathMain + "/#/ua/user/login"; return Response.temporaryRedirect(UriBuilder.fromUri(entryUrl).build()).build(); } Verifier v = new Verifier(token); System.out.println("token - " + token); OAuthService service2 =null; try { service2 = new ServiceBuilder() .provider(Google2Api.class) .apiKey(apiKeyG) .apiSecret(apiSecretG) .callback(callbackUrlG) .scope(SCOPE) .offline(true) .build(); } catch (Exception e1) { e1.printStackTrace(); } Token accessToken = new Token("ACCESS_TOKEN", "REFRESH_TOKEN"); accessToken = service2.getAccessToken(EMPTY_TOKEN, v); String refreshGoogleToken = accessToken.getSecret(); String accessGoogleToken = accessToken.getToken(); //Request protected resource OAuthRequest request = new OAuthRequest(Verb.GET, PROTECTED_RESOURCE_URL); service2.signRequest(accessToken, request); org.scribe.model.Response response = request.send(); System.out.println(response.getCode()); System.out.println(response.getBody()); //JSON string from Google response String json = response.getBody(); //parse string String googleId=null; String name=null; String link=null; String email=null; try { JSONParser jsonParser = new JSONParser(); JSONObject jsonObject = (JSONObject) jsonParser.parse(json); name = (String) jsonObject.get("name"); System.out.println("The first name is: " + name); googleId = (String) jsonObject.get("id"); System.out.println("id is: " + googleId); link = (String) jsonObject.get("picture"); System.out.println("Link to photo: " + link); email = (String) jsonObject.get("email"); System.out.println("Email: " + email); } catch (ParseException e) { e.printStackTrace(); } catch (NullPointerException ex) { ex.printStackTrace(); } //getting userId from current session HttpSession session = req.getSession(true); //CASE 1: Editing user profile from MyCabinet. Check if session has parameters if(session.getAttribute("userId") != null){ //Check if user exist by googleId. If exist - we can't join accounts - will be error. //ERROR - when login - two accounts with the same GoogleID User existUserWithGoogleId=null; try { existUserWithGoogleId = userRep.getByGoogleId(googleId); } catch (Exception e) { return SERVER_ERROR; } if (existUserWithGoogleId != null) { //add params to redirect URL to inform frontend that account is already in use //by another user String errorUrl= successURL + "?join=error"; return Response.temporaryRedirect(UriBuilder.fromUri(errorUrl).build()).build(); } int userId = Integer.parseInt((String)session.getAttribute("userId")); System.out.println(userId); //insert in User value of googleId and picture by userId User user=null; try { user = userRep.getById(userId); user.setGoogleId(googleId); user.setSocialPhoto(link); userRep.update(user); } catch (Exception e) { return SERVER_ERROR; } session.setAttribute("successMesage", "Successful joining Google account"); session.setAttribute("user", user); session.setAttribute("refreshGoogleToken", refreshGoogleToken); return Response.temporaryRedirect(UriBuilder.fromUri(successURL).build()).build(); } //CASE 2: Login to site. Session is not set. Find User by googleId //CASE 3: Registration. Session is not set. 
Create User with GoogleId and SocialPhoto //Check if user exist by googleId User user=null; try { user = userRep.getByGoogleId(googleId); } catch (Exception e) { return SERVER_ERROR; } if (user != null) { //Case 2 //creating Session for founded user. Setting params System.out.println("creating session"); setUpSuccessSession(user, session, "success login with GoogleId"); session.setAttribute("refreshGoogleToken", refreshGoogleToken); //Entering to site with Session return Response.temporaryRedirect(UriBuilder.fromUri(successURL).build()).build(); } //else CASE 3 //creating User to register User userToReg = new User(); String userLogin; if (name!=null && !name.isEmpty()) { userLogin = name; } else { userLogin = "unknown"; } userToReg.setName(userLogin); userToReg.setSocialLogin(userLogin); userToReg.setSurname("N/A"); userToReg.setEmail(email); userToReg.setActive(true); userToReg.setAddress("N/A"); userToReg.setPhone("N/A"); userToReg.setOrganizationInfo("N/A"); userToReg.setOrganizationName("N/A"); userToReg.setPassword(googleId); userToReg.setSocialPhoto(link); userToReg.setGoogleId(googleId); UserRole userRole = new UserRole(); userRole.setRole("гість"); userRole.setId(3); List<UserRole> list = new ArrayList<UserRole>(); list.add(userRole); userToReg.setUserRole(list); UserType userType = new UserType(); userType.setId(1); userToReg.setUserType(userType); Date currentDate = new Date(new java.util.Date().getTime()); System.out.println(currentDate); userToReg.setRegistrationDate(currentDate); //inserting user to DB try { userRep.insert(userToReg); } catch (Exception e) { return SERVER_ERROR; } //creating session setUpSuccessSession(userToReg, session, "successful Registration with GoogleId"); session.setAttribute("refreshGoogleToken", refreshGoogleToken); //session.setAttribute("user", userToReg); //Entering to site with Session return Response.temporaryRedirect(UriBuilder.fromUri(successURL).build()).build(); } @GET @Path("login/google_login_direct") //http://localhost:8080/webapi/account/login/google_login_direct public Response directGoogleLoginWithOldAccessToken(@Context HttpServletRequest req, @QueryParam("code") String refreshGoogleToken) { String pathAll = req.getRequestURL().toString(); String pathMain =pathAll.replace("webapi/account/login/google_login_direct", ""); System.out.println("pathMain - " + pathMain); String successURL = pathMain + "#/ua/user/profile"; System.out.println(successURL); String callbackUrlG = pathMain + callbackUrlGPath; System.out.println(callbackUrlG); System.out.println("Google refresh token - "+ refreshGoogleToken); //getting new access token with old refreshToken OAuthRequest request = new OAuthRequest(Verb.POST, "https://www.googleapis.com/oauth2/v3/token"); request.addBodyParameter("grant_type", "refresh_token"); request.addBodyParameter("refresh_token", refreshGoogleToken); // were accessToken is the Token object you want to refresh. 
request.addBodyParameter("client_id", apiKeyG); request.addBodyParameter("client_secret", apiSecretG); org.scribe.model.Response response = request.send(); System.out.println(response.getCode()); //200 - success System.out.println(response.getBody()); //JSON response //JSON string from Google response String json = response.getBody(); //parse string String new_access_token=null; try { JSONParser jsonParser = new JSONParser(); JSONObject jsonObject = (JSONObject) jsonParser.parse(json); new_access_token = (String) jsonObject.get("access_token"); System.out.println("new accessToken is: " + new_access_token); } catch (ParseException e) { e.printStackTrace(); } catch (NullPointerException ex) { ex.printStackTrace(); } //New request to protected resource with new accessToken and old refreshToken OAuthService service2 =null; try { service2 = new ServiceBuilder() .provider(Google2Api.class) .apiKey(apiKeyG) .apiSecret(apiSecretG) .callback(callbackUrlG) .scope(SCOPE) .offline(true) .build(); } catch (Exception e1) { e1.printStackTrace(); } Token accessToken = new Token(new_access_token, refreshGoogleToken); //Request protected resource OAuthRequest request2 = new OAuthRequest(Verb.GET, PROTECTED_RESOURCE_URL); service2.signRequest(accessToken, request2); org.scribe.model.Response response2 = request2.send(); System.out.println(response2.getCode()); //200 - success System.out.println(response2.getBody()); //JSON response //JSON string from Google response String json2 = response2.getBody(); //parse string String googleId=null; try { JSONParser jsonParser = new JSONParser(); JSONObject jsonObject = (JSONObject) jsonParser.parse(json2); googleId = (String) jsonObject.get("id"); System.out.println("id is: " + googleId); } catch (ParseException e) { e.printStackTrace(); } catch (NullPointerException ex) { ex.printStackTrace(); } //Login to site. Session is not set. Find User by googleId HttpSession sessionNew = req.getSession(true); //Check if user exist by googleId User user=null; try { user = userRep.getByGoogleId(googleId); System.out.println(user); } catch (Exception e) { return SERVER_ERROR; } if (user == null) { return NOT_FOUND; } //creating Session for founded user. Setting params setUpSuccessSession(user, sessionNew, "success direct login with GoogleId"); sessionNew.setAttribute("refreshGoogleToken", refreshGoogleToken); return Response.status(Response.Status.OK).entity(successURL).build(); } private static void setUpSuccessSession(User user, HttpSession session, String success){ session.setAttribute("userName",user.getName()); session.setAttribute("userId",user.getId().toString()); session.setAttribute("userSurname",user.getSurname()); session.setAttribute("socialLogin",user.getSocialLogin()); session.setAttribute("userRoleId",user.getUserRole().get(0).getId().toString()); session.setAttribute("userRole",user.getUserRole().get(0).getRole()); session.setAttribute("successMesage", success); session.setAttribute("user", user); //creating string for accessToken String accessToken = (String)session.getId() + ":" + (String)session.getAttribute("userId"); System.out.println("decoded accesToken - " + accessToken); String accessTokenEncoded=null; try { byte[] encoded = Base64.encodeBase64(accessToken.getBytes()); accessTokenEncoded = new String(encoded, "UTF-8"); } catch (IOException e) { e.printStackTrace(); } System.out.println("encoded accessToken -" + accessTokenEncoded); session.setAttribute("accessToken", accessTokenEncoded); } }
cleaning and comments
AnimalWebApp/src/main/java/com/animals/app/controller/resource/OAuthAuthorizationResource.java
cleaning and comments
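One detail of setUpSuccessSession above worth isolating: the session "access token" is plain Base64 over "sessionId:userId", which is reversible and carries no integrity protection. A standalone sketch of the same encode/decode pair, using java.util.Base64 instead of the commons-codec class the record imports; the id values are hypothetical:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class SessionTokenSketch {
    public static void main(String[] args) {
        String sessionId = "3F2504E0"; // hypothetical session id
        String userId = "42";          // hypothetical user id
        String raw = sessionId + ":" + userId;

        // Encode the same way the resource does: Base64 over "sessionId:userId".
        String token = Base64.getEncoder().encodeToString(raw.getBytes(StandardCharsets.UTF_8));
        System.out.println(token);

        // Anyone holding the token can decode it: Base64 is an encoding, not encryption or a signature.
        String decoded = new String(Base64.getDecoder().decode(token), StandardCharsets.UTF_8);
        System.out.println(decoded); // 3F2504E0:42
    }
}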
Java
mit
ef5ea18fbe02c3a8cb53c041b53cb49bff2efdb8
0
Riesenmumpitz/crap-sms
package com.crap.sms.service; import java.util.List; import com.crap.sms.domain.model.Subscriber; import com.crap.sms.domain.model.Subscription; import com.crap.sms.domain.repository.SubscriberRepository; public class Invoice { private SubscriberRepository sr; private List<Subscriber> subscribers; public Invoice() { sr = SubscriberRepository.getInstance(); subscribers = sr.getAll(); } public String work(){ String invoice = "INVOICE\n"; for(Subscriber s : subscribers){ invoice+=getSubscriberInvoice(s); s.setDataVolume(0); s.setUsedMinutes(0); sr.save(s); } return invoice; } private double getCharge(int usedMin, Subscription sub){ double charge = 0; switch(sub){ case GreenMobileS: charge+=8; charge+=0.08*usedMin; break; case GreenMobileM: charge+=22; if(usedMin>100){ charge+= 0.06*(usedMin-100); } break; case GreenMobileL: charge+=42; if(usedMin>150){ charge+= 0.04*(usedMin-150); } break; } return charge; } private String getSubscriberInvoice(Subscriber s) { String invoice = ""; invoice+="\n"+"Name: "+s.getSurName()+", "+s.getForeName()+"\n"; invoice+="Consumed data volume: "+s.getDataVolume()+"\n"; invoice+="Consumed minutes: "+s.getUsedMinutes()+"\n"; invoice+="Total charges: "+getCharge(s.getUsedMinutes(), s.getSubscription())+"\n\n"; return invoice; } }
src/main/java/com/crap/sms/service/Invoice.java
package com.crap.sms.service; import java.util.List; import com.crap.sms.domain.model.Subscriber; import com.crap.sms.domain.model.Subscription; import com.crap.sms.domain.repository.AbstractRepository; import com.crap.sms.domain.repository.SubscriberRepository; public class Invoice { private SubscriberRepository sr; private List<Subscriber> subscribers; public Invoice() { sr = SubscriberRepository.getInstance(); subscribers = sr.getAll(); } public String work(){ String invoice = ""; for(Subscriber s : subscribers){ //TODO reset Subscription e = s.getSubscription(); switch(e){ case GreenMobileS: invoice+=getSubscriberInvoice(s)+"\n"; break; case GreenMobileM: break; case GreenMobileL: break; } } return invoice; } private String getSubscriberInvoice(Subscriber s) { String invoice = ""; invoice+="\n"+"Name: "+s.getSurName()+", "+s.getForeName()+"\n"; invoice+=""; return ""; } }
Invoice class changes
src/main/java/com/crap/sms/service/Invoice.java
Invoice class changes
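The tariff arithmetic in getCharge above is easy to verify by hand: GreenMobileM, for instance, is a flat 22 plus 0.06 per minute beyond the 100 included minutes, so 150 minutes cost 22 + 0.06 * 50 = 25. A minimal sketch of that single branch; the class and method names are hypothetical, while the constants are copied from the record:

public class ChargeSketch {
    // Same piecewise rule as Invoice.getCharge for GreenMobileM:
    // a 22-unit base fee plus 0.06 per minute over the 100 included minutes.
    static double chargeM(int usedMin) {
        double charge = 22;
        if (usedMin > 100) {
            charge += 0.06 * (usedMin - 100);
        }
        return charge;
    }

    public static void main(String[] args) {
        System.out.println(chargeM(80));  // 22.0 - within the included minutes
        System.out.println(chargeM(150)); // 25.0 (22 + 0.06 * 50), up to floating-point rounding
    }
}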
Java
epl-1.0
dd8c1240a2a6a20314fd467615377aee7242042e
0
opcoach/training
package com.opcoach.training.rental.ui; import java.util.HashMap; import java.util.Map; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.IExtension; import org.eclipse.core.runtime.IExtensionPoint; import org.eclipse.core.runtime.IExtensionRegistry; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Platform; import org.eclipse.core.runtime.Status; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.resource.ImageRegistry; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.IColorProvider; import org.eclipse.swt.graphics.Image; import org.eclipse.ui.plugin.AbstractUIPlugin; import org.osgi.framework.BundleContext; /** * The activator class controls the plug-in life cycle */ public class RentalUIActivator extends AbstractUIPlugin implements RentalUIConstants { // The plug-in ID public static final String PLUGIN_ID = "com.opcoach.training.rental.ui"; // The shared instance private static RentalUIActivator plugin; /** The map of possible color providers (read in extensions) */ private Map<String, IColorProvider> paletteManager = new HashMap<String, IColorProvider>(); /** * The constructor */ public RentalUIActivator() { } /* * (non-Javadoc) * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext) */ public void start(BundleContext context) throws Exception { super.start(context); plugin = this; System.out.println("Start rental ui bundle"); readViewExtensions(); readColorProviderExtensions(); } public void readColorProviderExtensions() { IExtensionRegistry reg = Platform.getExtensionRegistry(); IExtensionPoint extp = reg.getExtensionPoint("com.opcoach.training.rental.ui.ColorProvider"); IExtension[] extensions = extp.getExtensions(); for (IExtension ext : extensions) { IConfigurationElement[] config = ext.getConfigurationElements(); for (IConfigurationElement elt : config) { // Create the color provider for label. 
try { // Create the executable extension Object exeExt = elt.createExecutableExtension("colorProviderClass"); // Add it (with its name) in the color provider map String name = elt.getAttribute("name"); paletteManager.put(name, (IColorProvider) exeExt); } catch (CoreException e) { IStatus st = new Status(IStatus.ERROR, PLUGIN_ID, "Could not create the palette class: "+ elt.getAttribute("colorProviderClass"),e); getLog().log(st); } } } } public void readViewExtensions() { IExtensionRegistry reg = Platform.getExtensionRegistry(); for (IConfigurationElement elt : reg.getConfigurationElementsFor("org.eclipse.ui.views")) { if (elt.getName().equals("view")) System.out.println("Plugin: " + elt.getNamespaceIdentifier() + "\t\t\tView: " + elt.getAttribute("name")); } } /* @return a never null collection of overridden color providers */ public Map<String, IColorProvider> getPaletteManager() { return paletteManager; } /* * (non-Javadoc) * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext) */ public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } /** * Returns the shared instance * * @return the shared instance */ public static RentalUIActivator getDefault() { return plugin; } /** * Returns an image descriptor for the image file at the given plug-in relative path * * @param path the path * @return the image descriptor */ public static ImageDescriptor getImageDescriptor(String path) { return imageDescriptorFromPlugin(PLUGIN_ID, path); } @Override protected void initializeImageRegistry(ImageRegistry reg) { reg.put(CUSTOMER_KEY, getImageDescriptor("icons/Customers.png")); reg.put(RENTAL_KEY, getImageDescriptor("icons/Rentals.png")); reg.put(RENTAL_OBJECT_KEY, getImageDescriptor("icons/RentalObjects.png")); reg.put(AGENCY_KEY, getImageDescriptor("icons/Agency.png")); } public Image getMyImage(String path) { // Use the global JFaceResources image registry ImageRegistry reg = JFaceResources.getImageRegistry(); // Try to fetch the image; it may already be present Image img = reg.get(path); if (img == null) { // The image is not stored in the registry yet, so add it ImageDescriptor desc = ImageDescriptor.createFromFile(this.getClass(), path); // The path serves as the key reg.put(path, desc); img = reg.get(path); } return img; } }
RCP/com.opcoach.training.rental.ui/src/com/opcoach/training/rental/ui/RentalUIActivator.java
package com.opcoach.training.rental.ui; import java.util.HashMap; import java.util.Map; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.IExtension; import org.eclipse.core.runtime.IExtensionPoint; import org.eclipse.core.runtime.IExtensionRegistry; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Platform; import org.eclipse.core.runtime.Status; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.resource.ImageRegistry; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.viewers.IColorProvider; import org.eclipse.swt.graphics.Image; import org.eclipse.ui.plugin.AbstractUIPlugin; import org.osgi.framework.BundleContext; /** * The activator class controls the plug-in life cycle */ public class RentalUIActivator extends AbstractUIPlugin implements RentalUIConstants { // The plug-in ID public static final String PLUGIN_ID = "com.opcoach.training.rental.ui"; // The shared instance private static RentalUIActivator plugin; /** The map of possible color providers (read in extensions) */ private Map<String, IColorProvider> paletteManager = new HashMap<String, IColorProvider>(); /** * The constructor */ public RentalUIActivator() { } /* * (non-Javadoc) * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext) */ public void start(BundleContext context) throws Exception { super.start(context); plugin = this; System.out.println("Start rental ui bundle"); readViewExtensions(); readColorProviderExtensions(); } public void readColorProviderExtensions() { IExtensionRegistry reg = Platform.getExtensionRegistry(); IExtensionPoint extp = reg.getExtensionPoint("com.opcoach.training.rental.ui.ColorProvider"); IExtension[] extensions = extp.getExtensions(); for (IExtension ext : extensions) { IConfigurationElement[] config = ext.getConfigurationElements(); for (IConfigurationElement elt : config) { // Create the color provider for label. 
try { // Create the executable extension Object exeExt = elt.createExecutableExtension("colorProviderClass"); // Add it (with its name) in the color provider map String name = elt.getAttribute("name"); paletteManager.put(name, (IColorProvider) exeExt); } catch (CoreException e) { IStatus st = new Status(IStatus.ERROR, PLUGIN_ID, "Could not create the palette class: "+elt.getAttribute("colorProviderClass"),e); getLog().log(st); } } } } public void readViewExtensions() { IExtensionRegistry reg = Platform.getExtensionRegistry(); for (IConfigurationElement elt : reg.getConfigurationElementsFor("org.eclipse.ui.views")) { if (elt.getName().equals("view")) System.out.println("Plugin : " + elt.getNamespaceIdentifier() + "\t\t\tView : " + elt.getAttribute("name")); } } /** @return a never-null map of overridden color providers */ public Map<String, IColorProvider> getPaletteManager() { return paletteManager; } /* * (non-Javadoc) * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext) */ public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } /** * Returns the shared instance * * @return the shared instance */ public static RentalUIActivator getDefault() { return plugin; } /** * Returns an image descriptor for the image file at the given plug-in relative path * * @param path the path * @return the image descriptor */ public static ImageDescriptor getImageDescriptor(String path) { return imageDescriptorFromPlugin(PLUGIN_ID, path); } @Override protected void initializeImageRegistry(ImageRegistry reg) { reg.put(CUSTOMER_KEY, getImageDescriptor("icons/Customers.png")); reg.put(RENTAL_KEY, getImageDescriptor("icons/Rentals.png")); reg.put(RENTAL_OBJECT_KEY, getImageDescriptor("icons/RentalObjects.png")); reg.put(AGENCY_KEY, getImageDescriptor("icons/Agency.png")); } public Image getMyImage(String path) { // Use the global JFaceResources image registry ImageRegistry reg = JFaceResources.getImageRegistry(); // Try to retrieve the image; it may already be present Image img = reg.get(path); if (img == null) { // The image is not stored in the registry yet, so add it ImageDescriptor desc = ImageDescriptor.createFromFile(this.getClass(), path); // The path is used as the key reg.put(path, desc); img = reg.get(path); } return img; } }
Reformatting git-svn-id: 5dcf22192dadc9e4089bc59a7a60e4d1091c81c1@325 28ab6df1-7a97-4e50-b59a-519495a15fef
RCP/com.opcoach.training.rental.ui/src/com/opcoach/training/rental/ui/RentalUIActivator.java
Reformatting
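The activator in the record above loads color providers through the custom com.opcoach.training.rental.ui.ColorProvider extension point and instantiates each contribution with createExecutableExtension("colorProviderClass"). As a rough sketch of what such a contributed class might look like (the class name, package and color choices below are illustrative assumptions, not taken from the record):

package com.opcoach.training.rental.ui.sample; // hypothetical package

import org.eclipse.jface.viewers.IColorProvider;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.widgets.Display;

/** Hypothetical contribution: the "colorProviderClass" attribute of a ColorProvider
 *  extension would name this class, and readColorProviderExtensions() would store
 *  one instance in paletteManager under the extension's "name" attribute. */
public class DarkPaletteProvider implements IColorProvider {

    @Override
    public Color getForeground(Object element) {
        // System colors are shared SWT resources and must not be disposed.
        return Display.getDefault().getSystemColor(SWT.COLOR_WHITE);
    }

    @Override
    public Color getBackground(Object element) {
        return Display.getDefault().getSystemColor(SWT.COLOR_DARK_GRAY);
    }
}

A matching plugin.xml entry would pair a "name" attribute with this class; that name is the key later used to look the provider up in getPaletteManager().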
Java
agpl-3.0
4089442d03409de25f2744010cb524832074a3d3
0
Scentsome/daily_something
package com.zencher.app.dailysomething; import android.app.Activity; import android.os.Bundle; public class MyActivity extends Activity { /** * Called when the activity is first created. by Alen & Pete */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); int num = 9; } }
src/com/zencher/app/dailysomething/MyActivity.java
package com.zencher.app.dailysomething; import android.app.Activity; import android.os.Bundle; public class MyActivity extends Activity { /** * Called when the activity is first created. by Alen */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); int num = 9; } }
Added comment. "Pete"
src/com/zencher/app/dailysomething/MyActivity.java
Added comment. "Pete"
Java
agpl-3.0
2016a68d03a0c9f03c94aa4af71bb09125b3b3e4
0
thklaus/rstudio,maligulzar/Rstudio-instrumented,JanMarvin/rstudio,brsimioni/rstudio,brsimioni/rstudio,brsimioni/rstudio,jzhu8803/rstudio,jzhu8803/rstudio,suribes/rstudio,maligulzar/Rstudio-instrumented,tbarrongh/rstudio,john-r-mcpherson/rstudio,more1/rstudio,jar1karp/rstudio,edrogers/rstudio,edrogers/rstudio,JanMarvin/rstudio,piersharding/rstudio,thklaus/rstudio,nvoron23/rstudio,more1/rstudio,jzhu8803/rstudio,suribes/rstudio,sfloresm/rstudio,piersharding/rstudio,pssguy/rstudio,maligulzar/Rstudio-instrumented,sfloresm/rstudio,jzhu8803/rstudio,githubfun/rstudio,more1/rstudio,vbelakov/rstudio,githubfun/rstudio,vbelakov/rstudio,JanMarvin/rstudio,tbarrongh/rstudio,vbelakov/rstudio,tbarrongh/rstudio,jzhu8803/rstudio,jzhu8803/rstudio,githubfun/rstudio,edrogers/rstudio,edrogers/rstudio,maligulzar/Rstudio-instrumented,jrnold/rstudio,tbarrongh/rstudio,thklaus/rstudio,jar1karp/rstudio,jrnold/rstudio,thklaus/rstudio,jrnold/rstudio,sfloresm/rstudio,piersharding/rstudio,brsimioni/rstudio,suribes/rstudio,nvoron23/rstudio,pssguy/rstudio,sfloresm/rstudio,edrogers/rstudio,tbarrongh/rstudio,suribes/rstudio,suribes/rstudio,nvoron23/rstudio,john-r-mcpherson/rstudio,githubfun/rstudio,brsimioni/rstudio,more1/rstudio,pssguy/rstudio,edrogers/rstudio,suribes/rstudio,jar1karp/rstudio,tbarrongh/rstudio,jar1karp/rstudio,vbelakov/rstudio,jrnold/rstudio,brsimioni/rstudio,piersharding/rstudio,sfloresm/rstudio,JanMarvin/rstudio,jrnold/rstudio,jrnold/rstudio,JanMarvin/rstudio,maligulzar/Rstudio-instrumented,brsimioni/rstudio,piersharding/rstudio,piersharding/rstudio,piersharding/rstudio,thklaus/rstudio,sfloresm/rstudio,thklaus/rstudio,john-r-mcpherson/rstudio,jzhu8803/rstudio,jzhu8803/rstudio,john-r-mcpherson/rstudio,more1/rstudio,maligulzar/Rstudio-instrumented,nvoron23/rstudio,JanMarvin/rstudio,more1/rstudio,thklaus/rstudio,piersharding/rstudio,jar1karp/rstudio,JanMarvin/rstudio,john-r-mcpherson/rstudio,more1/rstudio,vbelakov/rstudio,githubfun/rstudio,edrogers/rstudio,jar1karp/rstudio,maligulzar/Rstudio-instrumented,vbelakov/rstudio,JanMarvin/rstudio,tbarrongh/rstudio,john-r-mcpherson/rstudio,suribes/rstudio,JanMarvin/rstudio,pssguy/rstudio,piersharding/rstudio,sfloresm/rstudio,nvoron23/rstudio,tbarrongh/rstudio,jrnold/rstudio,githubfun/rstudio,vbelakov/rstudio,jar1karp/rstudio,more1/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,john-r-mcpherson/rstudio,githubfun/rstudio,thklaus/rstudio,nvoron23/rstudio,brsimioni/rstudio,sfloresm/rstudio,pssguy/rstudio,nvoron23/rstudio,jar1karp/rstudio,pssguy/rstudio,suribes/rstudio,githubfun/rstudio,john-r-mcpherson/rstudio,pssguy/rstudio,jrnold/rstudio,edrogers/rstudio,maligulzar/Rstudio-instrumented,jrnold/rstudio,pssguy/rstudio,vbelakov/rstudio
/* * BuildToolsPackagePanel.java * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.projects.ui.prefs.buildtools; import org.rstudio.core.client.widget.OperationWithInput; import org.rstudio.core.client.widget.ThemedButton; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.HelpLink; import org.rstudio.studio.client.common.PackagesHelpLink; import org.rstudio.studio.client.projects.model.RProjectBuildOptions; import org.rstudio.studio.client.projects.model.RProjectConfig; import org.rstudio.studio.client.projects.model.RProjectOptions; import org.rstudio.studio.client.projects.ui.prefs.ProjectPreferencesDialogResources; import org.rstudio.studio.client.workbench.WorkbenchContext; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.event.logical.shared.ValueChangeEvent; import com.google.gwt.event.logical.shared.ValueChangeHandler; import com.google.gwt.safehtml.shared.SafeHtmlBuilder; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.VerticalPanel; import com.google.inject.Inject; public class BuildToolsPackagePanel extends BuildToolsPanel { public BuildToolsPackagePanel() { RStudioGinjector.INSTANCE.injectMembers(this); ProjectPreferencesDialogResources RES = ProjectPreferencesDialogResources.INSTANCE; pathSelector_ = new DirectorySelector("Package directory:"); pathSelector_.getElement().getStyle().setMarginBottom(10, Unit.PX); add(pathSelector_); pathSelector_.addValueChangeHandler(new ValueChangeHandler<String>() { @Override public void onValueChange(ValueChangeEvent<String> event) { if (pathSelector_.getText().equals( workbenchContext_.getActiveProjectDir().getPath())) { pathSelector_.setText(""); } } }); chkUseDevtools_ = checkBox( "Use devtools package functions if available"); chkUseDevtools_.addStyleName(RES.styles().buildToolsDevtools()); add(chkUseDevtools_); roxygenizePanel_ = new VerticalPanel(); roxygenizePanel_.addStyleName(RES.styles().buildToolsRoxygenize()); HorizontalPanel rocletPanel = new HorizontalPanel(); chkUseRoxygen_ = checkBox("Generate documentation with Roxygen"); rocletPanel.add(chkUseRoxygen_); btnConfigureRoxygen_ = new ThemedButton("Configure..."); btnConfigureRoxygen_.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { new BuildToolsRoxygenOptionsDialog( roxygenOptions_, new OperationWithInput<BuildToolsRoxygenOptions>() { @Override public void execute(BuildToolsRoxygenOptions input) { roxygenOptions_ = input; chkUseRoxygen_.setValue(input.getRocletRd() || input.getRocletCollate() || input.getRocletNamespace()); } }).showModal(); } }); rocletPanel.add(btnConfigureRoxygen_); roxygenizePanel_.add(rocletPanel); add(roxygenizePanel_); add(installAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Build and Reload &mdash; R 
CMD INSTALL additional options:").toSafeHtml())); add(checkAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Check Package &mdash; R CMD check additional options:").toSafeHtml())); add(buildAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Build Source Package &mdash; R CMD build additional options:").toSafeHtml())); add(buildBinaryAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Build Binary Package &mdash; R CMD INSTALL additional options:").toSafeHtml())); HelpLink packagesHelpLink = new PackagesHelpLink(); packagesHelpLink.getElement().getStyle().setMarginTop(7, Unit.PX); add(packagesHelpLink); } @Inject public void initialize(WorkbenchContext workbenchContext) { workbenchContext_ = workbenchContext; } @Override protected void provideDefaults() { installAdditionalArguments_.setText("--no-multiarch --with-keep.source"); chkUseDevtools_.setValue(true); } @Override void load(RProjectOptions options) { RProjectConfig config = options.getConfig(); pathSelector_.setText(config.getPackagePath()); installAdditionalArguments_.setText(config.getPackageInstallArgs()); buildAdditionalArguments_.setText(config.getPackageBuildArgs()); buildBinaryAdditionalArguments_.setText(config.getPackageBuildBinaryArgs()); checkAdditionalArguments_.setText(config.getPackageCheckArgs()); roxygenOptions_ = new BuildToolsRoxygenOptions( config.getPackageRoxygenzieRd(), config.getPackageRoxygenizeCollate(), config.getPackageRoxygenizeNamespace(), options.getBuildOptions().getAutoRogyginizeOptions()); boolean showRoxygenize = config.hasPackageRoxygenize() || options.getBuildContext().isRoxygen2Installed(); roxygenizePanel_.setVisible(showRoxygenize); chkUseDevtools_.setValue(config.getPackageUseDevtools()); chkUseRoxygen_.setValue(config.hasPackageRoxygenize()); chkUseRoxygen_.addValueChangeHandler(new ValueChangeHandler<Boolean>() { @Override public void onValueChange(ValueChangeEvent<Boolean> event) { if (event.getValue()) { if (!roxygenOptions_.hasActiveRoclet()) { roxygenOptions_.setRocletRd(true); roxygenOptions_.setRocletCollate(true); roxygenOptions_.setRocletNamespace(true); } btnConfigureRoxygen_.click(); } else { roxygenOptions_.clearRoclets(); } } }); } @Override void save(RProjectOptions options) { RProjectConfig config = options.getConfig(); config.setPackageUseDevtools(chkUseDevtools_.getValue()); config.setPackagePath(pathSelector_.getText()); config.setPackageInstallArgs(installAdditionalArguments_.getText()); config.setPackageBuildArgs(buildAdditionalArguments_.getText()); config.setPackageBuildBinaryArgs(buildBinaryAdditionalArguments_.getText()); config.setPackageCheckArgs(checkAdditionalArguments_.getText()); config.setPackageRoxygenize(roxygenOptions_.getRocletRd(), roxygenOptions_.getRocletCollate(), roxygenOptions_.getRocletNamespace()); RProjectBuildOptions buildOptions = options.getBuildOptions(); buildOptions.setAutoRoxyginizeOptions( roxygenOptions_.getAutoRoxygenize()); } private PathSelector pathSelector_; private AdditionalArguments installAdditionalArguments_; private AdditionalArguments buildAdditionalArguments_; private AdditionalArguments buildBinaryAdditionalArguments_; private AdditionalArguments checkAdditionalArguments_; private BuildToolsRoxygenOptions roxygenOptions_; private VerticalPanel roxygenizePanel_; private CheckBox chkUseRoxygen_; private CheckBox chkUseDevtools_; private ThemedButton btnConfigureRoxygen_; private WorkbenchContext 
workbenchContext_; }
src/gwt/src/org/rstudio/studio/client/projects/ui/prefs/buildtools/BuildToolsPackagePanel.java
/* * BuildToolsPackagePanel.java * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.projects.ui.prefs.buildtools; import org.rstudio.core.client.widget.OperationWithInput; import org.rstudio.core.client.widget.ThemedButton; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.HelpLink; import org.rstudio.studio.client.common.PackagesHelpLink; import org.rstudio.studio.client.projects.model.RProjectBuildOptions; import org.rstudio.studio.client.projects.model.RProjectConfig; import org.rstudio.studio.client.projects.model.RProjectOptions; import org.rstudio.studio.client.projects.ui.prefs.ProjectPreferencesDialogResources; import org.rstudio.studio.client.workbench.WorkbenchContext; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.event.logical.shared.ValueChangeEvent; import com.google.gwt.event.logical.shared.ValueChangeHandler; import com.google.gwt.safehtml.shared.SafeHtmlBuilder; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.VerticalPanel; import com.google.inject.Inject; public class BuildToolsPackagePanel extends BuildToolsPanel { public BuildToolsPackagePanel() { RStudioGinjector.INSTANCE.injectMembers(this); ProjectPreferencesDialogResources RES = ProjectPreferencesDialogResources.INSTANCE; pathSelector_ = new DirectorySelector("Package directory:"); pathSelector_.getElement().getStyle().setMarginBottom(10, Unit.PX); add(pathSelector_); pathSelector_.addValueChangeHandler(new ValueChangeHandler<String>() { @Override public void onValueChange(ValueChangeEvent<String> event) { if (pathSelector_.getText().equals( workbenchContext_.getActiveProjectDir().getPath())) { pathSelector_.setText(""); } } }); chkUseDevtools_ = checkBox( "Use devtools package functions if available"); chkUseDevtools_.addStyleName(RES.styles().buildToolsDevtools()); add(chkUseDevtools_); roxygenizePanel_ = new VerticalPanel(); roxygenizePanel_.addStyleName(RES.styles().buildToolsRoxygenize()); HorizontalPanel rocletPanel = new HorizontalPanel(); chkUseRoxygen_ = checkBox("Generate documentation with Roxygen"); rocletPanel.add(chkUseRoxygen_); btnConfigureRoxygen_ = new ThemedButton("Configure..."); btnConfigureRoxygen_.addClickHandler(new ClickHandler() { @Override public void onClick(ClickEvent event) { new BuildToolsRoxygenOptionsDialog( roxygenOptions_, new OperationWithInput<BuildToolsRoxygenOptions>() { @Override public void execute(BuildToolsRoxygenOptions input) { roxygenOptions_ = input; chkUseRoxygen_.setValue(input.getRocletRd() || input.getRocletCollate() || input.getRocletNamespace()); } }).showModal(); } }); rocletPanel.add(btnConfigureRoxygen_); roxygenizePanel_.add(rocletPanel); add(roxygenizePanel_); add(installAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Build and Reload &mdash; R 
CMD INSTALL additional options:").toSafeHtml())); add(checkAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Check Package &mdash; R CMD check additional options:").toSafeHtml())); add(buildAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Build Source Package &mdash; R CMD build additional options:").toSafeHtml())); add(buildBinaryAdditionalArguments_ = new AdditionalArguments( new SafeHtmlBuilder().appendHtmlConstant( "Build Binary Package &mdash; R CMD INSTALL additional options:").toSafeHtml())); HelpLink packagesHelpLink = new PackagesHelpLink(); packagesHelpLink.getElement().getStyle().setMarginTop(7, Unit.PX); add(packagesHelpLink); } @Inject public void initialize(WorkbenchContext workbenchContext) { workbenchContext_ = workbenchContext; } @Override protected void provideDefaults() { installAdditionalArguments_.setText("--no-multiarch --with-keep.source"); chkUseDevtools_.setValue(true); } @Override void load(RProjectOptions options) { RProjectConfig config = options.getConfig(); pathSelector_.setText(config.getPackagePath()); installAdditionalArguments_.setText(config.getPackageInstallArgs()); buildAdditionalArguments_.setText(config.getPackageBuildArgs()); buildBinaryAdditionalArguments_.setText(config.getPackageBuildBinaryArgs()); checkAdditionalArguments_.setText(config.getPackageCheckArgs()); roxygenOptions_ = new BuildToolsRoxygenOptions( config.getPackageRoxygenzieRd(), config.getPackageRoxygenizeCollate(), config.getPackageRoxygenizeNamespace(), options.getBuildOptions().getAutoRogyginizeOptions()); boolean showRoxygenize = config.hasPackageRoxygenize() || options.getBuildContext().isRoxygen2Installed(); roxygenizePanel_.setVisible(showRoxygenize); chkUseDevtools_.setValue(config.getPackageUseDevtools()); chkUseRoxygen_.setValue(config.hasPackageRoxygenize()); chkUseRoxygen_.addValueChangeHandler(new ValueChangeHandler<Boolean>() { @Override public void onValueChange(ValueChangeEvent<Boolean> event) { if (event.getValue()) { if (!roxygenOptions_.hasActiveRoclet()) roxygenOptions_.setRocletRd(true); btnConfigureRoxygen_.click(); } else { roxygenOptions_.clearRoclets(); } } }); } @Override void save(RProjectOptions options) { RProjectConfig config = options.getConfig(); config.setPackageUseDevtools(chkUseDevtools_.getValue()); config.setPackagePath(pathSelector_.getText()); config.setPackageInstallArgs(installAdditionalArguments_.getText()); config.setPackageBuildArgs(buildAdditionalArguments_.getText()); config.setPackageBuildBinaryArgs(buildBinaryAdditionalArguments_.getText()); config.setPackageCheckArgs(checkAdditionalArguments_.getText()); config.setPackageRoxygenize(roxygenOptions_.getRocletRd(), roxygenOptions_.getRocletCollate(), roxygenOptions_.getRocletNamespace()); RProjectBuildOptions buildOptions = options.getBuildOptions(); buildOptions.setAutoRoxyginizeOptions( roxygenOptions_.getAutoRoxygenize()); } private PathSelector pathSelector_; private AdditionalArguments installAdditionalArguments_; private AdditionalArguments buildAdditionalArguments_; private AdditionalArguments buildBinaryAdditionalArguments_; private AdditionalArguments checkAdditionalArguments_; private BuildToolsRoxygenOptions roxygenOptions_; private VerticalPanel roxygenizePanel_; private CheckBox chkUseRoxygen_; private CheckBox chkUseDevtools_; private ThemedButton btnConfigureRoxygen_; private WorkbenchContext workbenchContext_; }
roxygen: default to use rd, collate, and namespace roclets
src/gwt/src/org/rstudio/studio/client/projects/ui/prefs/buildtools/BuildToolsPackagePanel.java
roxygen: default to use rd, collate, and namespace roclets
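The diff in this record is confined to the ValueChangeHandler on chkUseRoxygen_: when the checkbox is switched on and no roclet is active yet, the new version seeds all three roclets (rd, collate and namespace) instead of only rd. A minimal plain-Java sketch of that default-seeding pattern, detached from GWT (RoxygenOptions is a hypothetical stand-in for BuildToolsRoxygenOptions):

// Hypothetical stand-in for BuildToolsRoxygenOptions.
final class RoxygenOptions {
    boolean rd, collate, namespace;

    boolean hasActiveRoclet() {
        return rd || collate || namespace;
    }

    void clearRoclets() {
        rd = collate = namespace = false;
    }
}

final class RoxygenDefaults {
    /** Mirrors the handler logic: seed all three roclets on first activation. */
    static void onUseRoxygenToggled(RoxygenOptions opts, boolean enabled) {
        if (enabled) {
            if (!opts.hasActiveRoclet()) {
                opts.rd = true;
                opts.collate = true;
                opts.namespace = true;
            }
        } else {
            opts.clearRoclets();
        }
    }
}

The guard on hasActiveRoclet() keeps the defaults from clobbering an existing selection; they are only applied when nothing was configured, right before the configuration dialog opens.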
Java
lgpl-2.1
c84827f5a458b2f8d1d063e704b96a3475b5f9ac
0
sbliven/biojava,sbliven/biojava,sbliven/biojava
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * Created on 16.03.2004 * @author Andreas Prlic * * */ package org.biojava.bio.structure.io; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.zip.GZIPOutputStream; import org.biojava.bio.structure.AminoAcid; import org.biojava.bio.structure.Chain; import org.biojava.bio.structure.Group; import org.biojava.bio.structure.GroupIterator; import org.biojava.bio.structure.Structure; import org.biojava.utils.io.InputStreamProvider; /** * <p> * The wrapper class for parsing a PDB file. * </p> * * * <p> * Several flags can be set for this class * <ul> * <li> {@link #setParseCAOnly} - parse only the Atom records for C-alpha atoms (default:false)</li> * <li> {@link #setParseSecStruc} - a flag if the secondary structure information from the PDB file (author's assignment) should be parsed. * If true the assignment can be accessed through {@link AminoAcid}.getSecStruc(); (default:false)</li> * <li> {@link #setAlignSeqRes(boolean)} - should the AminoAcid sequences from the SEQRES * and ATOM records of a PDB file be aligned? (default:true)</li> * <li> {@link #setAutoFetch(boolean)} - if the PDB file can not be found locally, should it be fetched * from the EBI - ftp server? (default:false)</li> * </ul> * </p> * * * *<h2>Example</h2> * <p> * Q: How can I get a Structure object from a PDB file? 
* </p> * <p> * A: * <pre> public {@link Structure} loadStructure(String pathToPDBFile){ {@link PDBFileReader} pdbreader = new {@link PDBFileReader}(); {@link Structure} structure = null; try{ structure = pdbreader.getStructure(pathToPDBFile); System.out.println(structure); } catch (IOException e) { e.printStackTrace(); } return structure; } </pre> * * Access PDB files from a directory, take care of compressed PDB files * <pre> * public {@link Structure} loadStructureById() { String path = "/path/to/PDB/directory/"; {@link PDBFileReader} pdbreader = new {@link PDBFileReader}(); pdbreader.setPath(path); {@link Structure} structure = null; try { structure = pdbreader.getStructureById("5pti"); } catch (IOException e){ e.printStackTrace(); } return structure; } </pre> * * * @author Andreas Prlic * */ public class PDBFileReader implements StructureIOFile { // a list of big pdb files for testing // "1htq", // "1c2w", // "1ffk", // "1giy", // "1j5a", // "1jj2", // "1jzx", // "1jzy", // "1jzz", // "1k01", // "1k73", // "1k8a", // "1k9m", // "1kc8", // "1kd1", // "1kqs", // "1m1k", // "1m90", // "1mkz", // "1ml5", // "1n8r", String path ; List<String> extensions ; boolean parseSecStruc; boolean autoFetch; boolean parseCAOnly; boolean alignSeqRes; public static void main(String[] args){ String filename = "/path/to/PDBFile.pdb" ; // also see the demos PDBFileReader pdbreader = new PDBFileReader(); pdbreader.setParseSecStruc(true); pdbreader.setAlignSeqRes(true); pdbreader.setParseCAOnly(false); pdbreader.setAutoFetch(true); try{ Structure struc = pdbreader.getStructure(filename); System.out.println(struc); GroupIterator gi = new GroupIterator(struc); while (gi.hasNext()){ Group g = (Group) gi.next(); Chain c = g.getParent(); if ( g instanceof AminoAcid ){ AminoAcid aa = (AminoAcid)g; Map<String,String> sec = aa.getSecStruc(); System.out.println(c.getName() + " " + g + " " + sec); } } } catch (Exception e) { e.printStackTrace(); } } public PDBFileReader() { extensions = new ArrayList<String>(); path = "" ; extensions.add(".ent"); extensions.add(".pdb"); extensions.add(".ent.gz"); extensions.add(".pdb.gz"); extensions.add(".ent.Z"); extensions.add(".pdb.Z"); parseSecStruc = false; autoFetch = false; parseCAOnly = false; alignSeqRes = true; } /** return the flag if only the CA atoms should be parsed * * @return flag if CA only should be read */ public boolean isParseCAOnly() { return parseCAOnly; } /** only the CA atoms should be parsed from the PDB file * * @param parseCAOnly */ public void setParseCAOnly(boolean parseCAOnly) { this.parseCAOnly = parseCAOnly; } /** get the flag if the SEQRES and ATOM amino acids are going to be aligned * * @return flag */ public boolean isAlignSeqRes() { return alignSeqRes; } /** set the flag if the SEQRES and ATOM amino acids should be aligned and linked * * @param alignSeqRes */ public void setAlignSeqRes(boolean alignSeqRes) { this.alignSeqRes = alignSeqRes; } /** should the parser fetch missing PDB files from the EBI FTP server automatically? * default is false * @return flag */ public boolean isAutoFetch() { return autoFetch; } /** tell the parser to fetch missing PDB files from the EBI FTP server automatically. * * default is false. If true, new PDB files will be automatically stored in the Path and gzip compressed. * * @param autoFetch */ public void setAutoFetch(boolean autoFetch) { this.autoFetch = autoFetch; } /** A flag to tell the parser to parse the Author's secondary structure assignment from the file * default is set to false, i.e. do NOT parse. 
* @return the flag */ public boolean isParseSecStruc() { return parseSecStruc; } /** A flag to tell the parser to parse the Author's secondary structure assignment from the file * */ public void setParseSecStruc(boolean parseSecStruc) { this.parseSecStruc = parseSecStruc; } /** directory where to find PDB files */ public void setPath(String p){ path = p ; } /** * Returns the path value. * @return a String representing the path value * @see #setPath * */ public String getPath() { return path ; } /** define supported file extensions * compressed extensions .Z,.gz do not need to be specified * they are dealt with automatically. */ public void addExtension(String s){ //System.out.println("add Extension "+s); extensions.add(s); } /** clear the supported file extensions * */ public void clearExtensions(){ extensions.clear(); } /** try to find the file in the file system and return a file stream in order to parse it * rules for finding the file: * - first check: if file is in path specified by PDBpath * - second check: if not found check in PDBpath/xy/ where xy is second and third char of PDBcode. * if autoFetch is set it will try to download missing PDB files automatically. */ private InputStream getInputStream(String pdbId) throws IOException { //System.out.println("checking file"); // compression formats supported // this has been moved to InputStreamProvider ... //String[] str = {".gz",".zip",".Z"}; //ArrayList compressions = new ArrayList( Arrays.asList( str ) ); InputStream inputStream =null; String pdbFile = null ; File f = null ; // these are the possible PDB file names... String fpath = path+"/"+pdbId; String ppath = path +"/pdb"+pdbId; String[] paths = new String[]{fpath,ppath}; for ( int p=0;p<paths.length;p++ ){ String testpath = paths[p]; //System.out.println(testpath); for (int i=0 ; i<extensions.size();i++){ String ex = (String)extensions.get(i) ; //System.out.println("PDBFileReader testing: "+testpath+ex); f = new File(testpath+ex) ; if ( f.exists()) { //System.out.println("found!"); pdbFile = testpath+ex ; InputStreamProvider isp = new InputStreamProvider(); inputStream = isp.getInputStream(pdbFile); break; } if ( pdbFile != null) break; } } if ( pdbFile == null ) { if ( autoFetch) return downloadAndGetInputStream(pdbId); String message = "no structure with PDB code " + pdbId + " found!" ; throw new IOException (message); } return inputStream ; } private File downloadPDB(String pdbId){ if ((path == null) || (path.equals(""))){ System.err.println("you did not set the path in PDBFileReader, don't know where to write the downloaded file to"); System.err.println("assuming default location is local directory."); path = "."; } File tempFile = new File(path+"/"+pdbId+".pdb.gz"); File pdbHome = new File(path); if ( ! 
pdbHome.canWrite() ){ System.err.println("can not write to " + pdbHome); return null; } String ftp = String.format("ftp://ftp.ebi.ac.uk/pub/databases/msd/pdb_uncompressed/pdb%s.ent", pdbId.toLowerCase()); System.out.println("Fetching " + ftp); try { URL url = new URL(ftp); InputStream conn = url.openStream(); // prepare destination System.out.println("writing to " + tempFile); FileOutputStream outPut = new FileOutputStream(tempFile); GZIPOutputStream gzOutPut = new GZIPOutputStream(outPut); PrintWriter pw = new PrintWriter(gzOutPut); BufferedReader fileBuffer = new BufferedReader(new InputStreamReader(conn)); String line; while ((line = fileBuffer.readLine()) != null) { pw.println(line); } pw.flush(); pw.close(); outPut.close(); conn.close(); } catch (Exception e){ e.printStackTrace(); return null; } return tempFile; } private InputStream downloadAndGetInputStream(String pdbId) throws IOException{ //PDBURLReader reader = new PDBURLReader(); //Structure s = reader.getStructureById(pdbId); File tmp = downloadPDB(pdbId); if ( tmp != null ) { InputStreamProvider prov = new InputStreamProvider(); return prov.getInputStream(tmp); } else { throw new IOException("could not find PDB " + pdbId + " in file system and also could not download"); } } /** load a structure from the local file system and return a Structure object * @param pdbId a String specifying the id value (PDB code) * @return the Structure object * @throws IOException ... */ public Structure getStructureById(String pdbId) throws IOException { InputStream inStream = getInputStream(pdbId); PDBFileParser pdbpars = new PDBFileParser(); pdbpars.setParseSecStruc(parseSecStruc); pdbpars.setAlignSeqRes(alignSeqRes); pdbpars.setParseCAOnly(parseCAOnly); Structure struc = pdbpars.parsePDBFile(inStream) ; return struc ; } /** opens filename, parses it and returns * a Structure object. * @param filename a String * @return the Structure object * @throws IOException ... */ public Structure getStructure(String filename) throws IOException { File f = new File(filename); return getStructure(f); } /** opens filename, parses it and returns a Structure object * * @param filename a File object * @return the Structure object * @throws IOException ... */ public Structure getStructure(File filename) throws IOException { InputStreamProvider isp = new InputStreamProvider(); InputStream inStream = isp.getInputStream(filename); PDBFileParser pdbpars = new PDBFileParser(); pdbpars.setParseSecStruc(parseSecStruc); pdbpars.setAlignSeqRes(alignSeqRes); pdbpars.setParseCAOnly(parseCAOnly); Structure struc = pdbpars.parsePDBFile(inStream) ; return struc ; } }
src/org/biojava/bio/structure/io/PDBFileReader.java
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * Created on 16.03.2004 * @author Andreas Prlic * * */ package org.biojava.bio.structure.io; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.zip.GZIPOutputStream; import org.biojava.bio.structure.AminoAcid; import org.biojava.bio.structure.Chain; import org.biojava.bio.structure.Group; import org.biojava.bio.structure.GroupIterator; import org.biojava.bio.structure.Structure; import org.biojava.utils.io.InputStreamProvider; /** * <p> * The wrapper class for parsing a PDB file. * </p> * * * <p> * Several flags can be set for this class * <ul> * <li> {@link #setParseCAOnly} - parse only the Atom records for C-alpha atoms (default:false)</li> * <li> {@link #setParseSecStruc} - a flag if the secondary structure information from the PDB file (author's assignment) should be parsed. * If true the assignment can be accessed through {@link AminoAcid}.getSecStruc(); (default:false)</li> * <li> {@link #setAlignSeqRes(boolean)} - should the AminoAcid sequences from the SEQRES * and ATOM records of a PDB file be aligned? (default:true)</li> * <li> {@link #setAutoFetch(boolean)} - if the PDB file can not be found locally, should it be fetched * from the EBI - ftp server? (default:false)</li> * </ul> * </p> * * * *<h2>Example</h2> * <p> * Q: How can I get a Structure object from a PDB file? 
* </p> * <p> * A: * <pre> public {@link Structure} loadStructure(String pathToPDBFile){ {@link PDBFileReader} pdbreader = new {@link PDBFileReader}(); {@link Structure} structure = null; try{ structure = pdbreader.getStructure(pathToPDBFile); System.out.println(structure); } catch (IOException e) { e.printStackTrace(); } return structure; } </pre> * * Access PDB files from a directory, take care of compressed PDB files * <pre> * public {@link Structure} loadStructureById() { String path = "/path/to/PDB/directory/"; {@link PDBFileReader} pdbreader = new {@link PDBFileReader}(); pdbreader.setPath(path); {@link Structure} structure = null; try { structure = pdbreader.getStructureById("5pti"); } catch (IOException e){ e.printStackTrace(); } return structure; } </pre> * * * @author Andreas Prlic * */ public class PDBFileReader implements StructureIOFile { // a list of big pdb files for testing // "1htq", // "1c2w", // "1ffk", // "1giy", // "1j5a", // "1jj2", // "1jzx", // "1jzy", // "1jzz", // "1k01", // "1k73", // "1k8a", // "1k9m", // "1kc8", // "1kd1", // "1kqs", // "1m1k", // "1m90", // "1mkz", // "1ml5", // "1n8r", String path ; List<String> extensions ; boolean parseSecStruc; boolean autoFetch; boolean parseCAOnly; boolean alignSeqRes; public static void main(String[] args){ String filename = "/path/to/PDBFile.pdb" ; // also see the demos PDBFileReader pdbreader = new PDBFileReader(); pdbreader.setParseSecStruc(true); pdbreader.setAlignSeqRes(true); pdbreader.setParseCAOnly(false); pdbreader.setAutoFetch(true); try{ Structure struc = pdbreader.getStructure(filename); System.out.println(struc); GroupIterator gi = new GroupIterator(struc); while (gi.hasNext()){ Group g = (Group) gi.next(); Chain c = g.getParent(); if ( g instanceof AminoAcid ){ AminoAcid aa = (AminoAcid)g; Map<String,String> sec = aa.getSecStruc(); System.out.println(c.getName() + " " + g + " " + sec); } } } catch (Exception e) { e.printStackTrace(); } } public PDBFileReader() { extensions = new ArrayList<String>(); path = "" ; extensions.add(".ent"); extensions.add(".pdb"); extensions.add(".ent.gz"); extensions.add(".pdb.gz"); extensions.add(".ent.Z"); extensions.add(".pdb.Z"); parseSecStruc = false; autoFetch = false; parseCAOnly = false; alignSeqRes = true; } /** return the flag if only the CA atoms should be parsed * * @return flag if CA only should be read */ public boolean isParseCAOnly() { return parseCAOnly; } /** only the CA atoms should be parsed from the PDB file * * @param parseCAOnly */ public void setParseCAOnly(boolean parseCAOnly) { this.parseCAOnly = parseCAOnly; } /** get the flag if the SEQRES and ATOM amino acids are going to be aligned * * @return flag */ public boolean isAlignSeqRes() { return alignSeqRes; } /** set the flag if the SEQRES and ATOM amino acids should be aligned and linked * * @param alignSeqRes */ public void setAlignSeqRes(boolean alignSeqRes) { this.alignSeqRes = alignSeqRes; } /** should the parser fetch missing PDB files from the EBI FTP server automatically? * default is false * @return flag */ public boolean isAutoFetch() { return autoFetch; } /** tell the parser to fetch missing PDB files from the EBI FTP server automatically. * * default is false. If true, new PDB files will be automatically stored in the Path and gzip compressed. * * @param autoFetch */ public void setAutoFetch(boolean autoFetch) { this.autoFetch = autoFetch; } /** A flag to tell the parser to parse the Author's secondary structure assignment from the file * default is set to false, i.e. do NOT parse. 
* @return the flag */ public boolean isParseSecStruc() { return parseSecStruc; } /** A flag to tell the parser to parse the Author's secondary structure assignment from the file * */ public void setParseSecStruc(boolean parseSecStruc) { this.parseSecStruc = parseSecStruc; } /** directory where to find PDB files */ public void setPath(String p){ path = p ; } /** * Returns the path value. * @return a String representing the path value * @see #setPath * */ public String getPath() { return path ; } /** define supported file extensions * compressed extensions .Z,.gz do not need to be specified * they are dealt with automatically. */ public void addExtension(String s){ //System.out.println("add Extension "+s); extensions.add(s); } /** clear the supported file extensions * */ public void clearExtensions(){ extensions.clear(); } /** try to find the file in the file system and return a file stream in order to parse it * rules for finding the file: * - first check: if file is in path specified by PDBpath * - second check: if not found check in PDBpath/xy/ where xy is second and third char of PDBcode. * if autoFetch is set it will try to download missing PDB files automatically. */ private InputStream getInputStream(String pdbId) throws IOException { //System.out.println("checking file"); // compression formats supported // this has been moved to InputStreamProvider ... //String[] str = {".gz",".zip",".Z"}; //ArrayList compressions = new ArrayList( Arrays.asList( str ) ); InputStream inputStream =null; String pdbFile = null ; File f = null ; // these are the possible PDB file names... String fpath = path+"/"+pdbId; String ppath = path +"/pdb"+pdbId; String[] paths = new String[]{fpath,ppath}; for ( int p=0;p<paths.length;p++ ){ String testpath = paths[p]; //System.out.println(testpath); for (int i=0 ; i<extensions.size();i++){ String ex = (String)extensions.get(i) ; //System.out.println("PDBFileReader testing: "+testpath+ex); f = new File(testpath+ex) ; if ( f.exists()) { //System.out.println("found!"); pdbFile = testpath+ex ; InputStreamProvider isp = new InputStreamProvider(); inputStream = isp.getInputStream(pdbFile); break; } if ( pdbFile != null) break; } } if ( pdbFile == null ) { if ( autoFetch) return downloadAndGetInputStream(pdbId); String message = "no structure with PDB code " + pdbId + " found!" ; throw new IOException (message); } return inputStream ; } private File downloadPDB(String pdbId){ File tempFile = new File(path+"/"+pdbId+".pdb.gz"); File pdbHome = new File(path); if ( ! 
pdbHome.canWrite() ){ System.err.println("can not write to " + pdbHome); return null; } String ftp = String.format("ftp://ftp.ebi.ac.uk/pub/databases/msd/pdb_uncompressed/pdb%s.ent", pdbId.toLowerCase()); System.out.println("Fetching " + ftp); try { URL url = new URL(ftp); InputStream conn = url.openStream(); // prepare destination System.out.println("writing to " + tempFile); FileOutputStream outPut = new FileOutputStream(tempFile); GZIPOutputStream gzOutPut = new GZIPOutputStream(outPut); PrintWriter pw = new PrintWriter(gzOutPut); BufferedReader fileBuffer = new BufferedReader(new InputStreamReader(conn)); String line; while ((line = fileBuffer.readLine()) != null) { pw.println(line); } pw.flush(); pw.close(); outPut.close(); conn.close(); } catch (Exception e){ e.printStackTrace(); return null; } return tempFile; } private InputStream downloadAndGetInputStream(String pdbId) throws IOException{ //PDBURLReader reader = new PDBURLReader(); //Structure s = reader.getStructureById(pdbId); File tmp = downloadPDB(pdbId); if ( tmp != null ) { InputStreamProvider prov = new InputStreamProvider(); return prov.getInputStream(tmp); } else { throw new IOException("could not find PDB " + pdbId + " in file system and also could not download"); } } /** load a structure from the local file system and return a Structure object * @param pdbId a String specifying the id value (PDB code) * @return the Structure object * @throws IOException ... */ public Structure getStructureById(String pdbId) throws IOException { InputStream inStream = getInputStream(pdbId); PDBFileParser pdbpars = new PDBFileParser(); pdbpars.setParseSecStruc(parseSecStruc); pdbpars.setAlignSeqRes(alignSeqRes); pdbpars.setParseCAOnly(parseCAOnly); Structure struc = pdbpars.parsePDBFile(inStream) ; return struc ; } /** opens filename, parses it and returns * a Structure object. * @param filename a String * @return the Structure object * @throws IOException ... */ public Structure getStructure(String filename) throws IOException { File f = new File(filename); return getStructure(f); } /** opens filename, parses it and returns a Structure object * * @param filename a File object * @return the Structure object * @throws IOException ... */ public Structure getStructure(File filename) throws IOException { InputStreamProvider isp = new InputStreamProvider(); InputStream inStream = isp.getInputStream(filename); PDBFileParser pdbpars = new PDBFileParser(); pdbpars.setParseSecStruc(parseSecStruc); pdbpars.setAlignSeqRes(alignSeqRes); pdbpars.setParseCAOnly(parseCAOnly); Structure struc = pdbpars.parsePDBFile(inStream) ; return struc ; } }
assuming if no path is set, PDB files are downloaded to local dir. git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@6764 7c6358e6-4a41-0410-a743-a5b2a554c398
src/org/biojava/bio/structure/io/PDBFileReader.java
assuming if no path is set, PDB files are downloaded to local dir.
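The commit above makes downloadPDB() fall back to the current directory (".") when no path has been configured, instead of writing to an empty path. A short usage sketch against the API shown in the record (the PDB id 5pti is taken from the class javadoc; the output location reflects the fallback this commit introduces):

import java.io.IOException;

import org.biojava.bio.structure.Structure;
import org.biojava.bio.structure.io.PDBFileReader;

public class AutoFetchDemo {
    public static void main(String[] args) {
        PDBFileReader reader = new PDBFileReader();
        reader.setAutoFetch(true); // download from the EBI FTP server if not found locally
        // Note: no setPath() call. After this commit the reader warns on stderr and
        // writes the downloaded, gzip-compressed file to "./5pti.pdb.gz".
        try {
            Structure structure = reader.getStructureById("5pti");
            System.out.println(structure);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}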
Java
lgpl-2.1
edac0ace031c8da1bb693dd37c7267f6f279e047
0
alkacon/opencms-core,gallardo/opencms-core,gallardo/opencms-core,gallardo/opencms-core,alkacon/opencms-core,alkacon/opencms-core,gallardo/opencms-core,alkacon/opencms-core
/* * File : $Source$ * Date : $Date$ * Version: $Revision$ * * This library is part of OpenCms - * the Open Source Content Management System * * Copyright (C) 2002 - 2008 Alkacon Software (http://www.alkacon.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about Alkacon Software, please see the * company website: http://www.alkacon.com * * For further information about OpenCms, please see the * project website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.opencms.search.solr; import org.opencms.file.CmsObject; import org.opencms.file.CmsPropertyDefinition; import org.opencms.i18n.CmsEncoder; import org.opencms.main.OpenCms; import org.opencms.search.fields.CmsSearchField; import org.opencms.util.CmsPair; import org.opencms.util.CmsRequestUtil; import org.opencms.util.CmsStringUtil; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.common.params.CommonParams; /** * A Solr search query.<p> */ public class CmsSolrQuery extends SolrQuery { /** A constant to add the score field to the result documents. */ public static final String ALL_RETURN_FIELDS = "*,score"; /** The default facet date gap. */ public static final String DEFAULT_FACET_DATE_GAP = "+1DAY"; /** The default query. */ public static final String DEFAULT_QUERY = "*:*"; /** The query type. */ public static final String DEFAULT_QUERY_TYPE = "edismax"; /** The default search result count. */ public static final Integer DEFAULT_ROWS = new Integer(10); /** A constant holding the minimal set of return fields needed to identify a result document. */ public static final String MINIMUM_FIELDS = CmsSearchField.FIELD_PATH + "," + CmsSearchField.FIELD_TYPE + "," + CmsSearchField.FIELD_SOLR_ID + "," + CmsSearchField.FIELD_ID; /** A constant holding the structure fields (no content fields) returned for structure queries. */ public static final String STRUCTURE_FIELDS = CmsSearchField.FIELD_PATH + "," + CmsSearchField.FIELD_TYPE + "," + CmsSearchField.FIELD_ID + "," + CmsSearchField.FIELD_CATEGORY + "," + CmsSearchField.FIELD_DATE_CONTENT + "," + CmsSearchField.FIELD_DATE_CREATED + "," + CmsSearchField.FIELD_DATE_EXPIRED + "," + CmsSearchField.FIELD_DATE_LASTMODIFIED + "," + CmsSearchField.FIELD_DATE_RELEASED + "," + CmsSearchField.FIELD_SUFFIX + "," + CmsSearchField.FIELD_DEPENDENCY_TYPE + "," + CmsSearchField.FIELD_DESCRIPTION + "," + CmsPropertyDefinition.PROPERTY_TITLE + CmsSearchField.FIELD_DYNAMIC_PROPERTIES + "," + CmsSearchField.FIELD_RESOURCE_LOCALES + "," + CmsSearchField.FIELD_CONTENT_LOCALES + "," + CmsSearchField.FIELD_SCORE + "," + CmsSearchField.FIELD_PARENT_FOLDERS; /** The serial version UID. 
*/ private static final long serialVersionUID = -2387357736597627703L; /** The facet date gap to use for date facets. */ private String m_facetDateGap = DEFAULT_FACET_DATE_GAP; /** Ignore expiration flag. */ private boolean m_ignoreExpiration; /** The parameters given by the 'query string'. */ private Map<String, String[]> m_queryParameters = new HashMap<String, String[]>(); /** The search words. */ private String m_text; /** The name of the field to search the text in. */ private List<String> m_textSearchFields = new ArrayList<String>(); /** * Default constructor.<p> */ public CmsSolrQuery() { this(null, null); } /** * Public constructor.<p> * * @param cms the current OpenCms context * @param queryParams the Solr query parameters */ public CmsSolrQuery(CmsObject cms, Map<String, String[]> queryParams) { setQuery(DEFAULT_QUERY); setFields(ALL_RETURN_FIELDS); setRequestHandler(DEFAULT_QUERY_TYPE); setRows(DEFAULT_ROWS); // set the values from the request context if (cms != null) { setLocales(Collections.singletonList(cms.getRequestContext().getLocale())); setSearchRoots(Collections.singletonList(cms.getRequestContext().getSiteRoot() + "/")); } if (queryParams != null) { m_queryParameters = queryParams; } ensureParameters(); ensureReturnFields(); ensureExpiration(); } /** * Returns the resource type if only one is set as filter query.<p> * * @param fqs the field queries to check * * @return the type or <code>null</code> */ public static String getResourceType(String[] fqs) { String ret = null; int count = 0; if (fqs != null) { for (String fq : fqs) { if (fq.startsWith(CmsSearchField.FIELD_TYPE + ":")) { String val = fq.substring((CmsSearchField.FIELD_TYPE + ":").length()); val = val.replaceAll("\"", ""); if (OpenCms.getResourceManager().hasResourceType(val)) { count++; ret = val; } } } } return (count == 1) ? 
ret : null; } /** * Creates and adds a filter query.<p> * * @param fieldName the field name to create a filter query on * @param vals the values that should match for the given field * @param all <code>true</code> to combine the given values with 'AND', <code>false</code> for 'OR' * @param useQuotes <code>true</code> to surround the given values with double quotes, <code>false</code> otherwise */ public void addFilterQuery(String fieldName, List<String> vals, boolean all, boolean useQuotes) { if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(fieldName + ":")) { removeFilterQuery(fq); } } } addFilterQuery(createFilterQuery(fieldName, vals, all, useQuotes)); } /** * Adds the given fields/orders to the existing sort fields.<p> * * @param sortFields the sortFields to set */ public void addSortFieldOrders(Map<String, ORDER> sortFields) { if ((sortFields != null) && !sortFields.isEmpty()) { // add the sort fields to the query for (Map.Entry<String, ORDER> entry : sortFields.entrySet()) { addSort(entry.getKey(), entry.getValue()); } } } /** * @see java.lang.Object#clone() */ @Override public CmsSolrQuery clone() { CmsSolrQuery sq = new CmsSolrQuery(null, CmsRequestUtil.createParameterMap(toString())); sq.m_ignoreExpiration = m_ignoreExpiration; return sq; } /** * Ensures that the initial request parameters will overwrite the member values.<p> * * You can initialize the query with HTTP request parameters, then make some method calls * and finally re-ensure that the initial request parameters will overwrite the changes * made in the meantime.<p> */ public void ensureParameters() { // overwrite already set values with values from query String if ((m_queryParameters != null) && !m_queryParameters.isEmpty()) { for (Map.Entry<String, String[]> entry : m_queryParameters.entrySet()) { if (!entry.getKey().equals(CommonParams.FQ)) { // add or replace all parameters from the query String setParam(entry.getKey(), entry.getValue()); } else { // special handling for filter queries replaceFilterQueries(entry.getValue()); } } } } /** * Removes the expiration and release date filter queries and flags the query to ignore expiration. */ public void removeExpiration() { if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(CmsSearchField.FIELD_DATE_EXPIRED + ":") || fq.startsWith(CmsSearchField.FIELD_DATE_RELEASED + ":")) { removeFilterQuery(fq); } } } m_ignoreExpiration = true; } /** * Sets the categories only if not set in the query parameters.<p> * * @param categories the categories to set */ public void setCategories(List<String> categories) { if ((categories != null) && !categories.isEmpty()) { addFilterQuery(CmsSearchField.FIELD_CATEGORY + CmsSearchField.FIELD_DYNAMIC_EXACT, categories, true, true); } } /** * Sets the categories only if not set in the query parameters.<p> * * @param categories the categories to set */ public void setCategories(String... 
categories) { setCategories(Arrays.asList(categories)); } /** * Sets date ranges.<p> * * This call will overwrite all existing date ranges for the given keys (name of the date facet field).<p> * * The parameter Map is used as follows:<p> * <ul> * <li><code>keys: </code>Solr field name {@link org.opencms.search.fields.CmsSearchField} and * <li><code>values: </code> pairs with min date as first and max date as second {@link org.opencms.util.CmsPair} * </ul> * Alternatively you can use Solr standard query syntax like:<p> * <ul> * <li><code>+created:[* TO NOW]</code> * <li><code>+lastmodified:[' + date + ' TO NOW]</code> * </ul> * whereby date is Solr formatted: * {@link org.opencms.search.CmsSearchUtil#getDateAsIso8601(Date)} * <p> * * @param dateRanges the ranges map with field name as key and a CmsPair with min date as first and max date as second */ public void setDateRanges(Map<String, CmsPair<Date, Date>> dateRanges) { if ((dateRanges != null) && !dateRanges.isEmpty()) { // remove the date ranges for (Map.Entry<String, CmsPair<Date, Date>> entry : dateRanges.entrySet()) { removeFacetField(entry.getKey()); } // add the date ranges for (Map.Entry<String, CmsPair<Date, Date>> entry : dateRanges.entrySet()) { addDateRangeFacet( entry.getKey(), entry.getValue().getFirst(), entry.getValue().getSecond(), m_facetDateGap); } } } /** * Sets the facetDateGap.<p> * * @param facetDateGap the facetDateGap to set */ public void setFacetDateGap(String facetDateGap) { m_facetDateGap = facetDateGap; } /** * Sets the highlightFields.<p> * * @param highlightFields the highlightFields to set */ public void setHighlightFields(List<String> highlightFields) { setParam("hl.fl", CmsStringUtil.listAsString(highlightFields, ",")); } /** * Sets the highlightFields.<p> * * @param highlightFields the highlightFields to set */ public void setHighlightFields(String... highlightFields) { setParam("hl.fl", CmsStringUtil.arrayAsString(highlightFields, ",")); } /** * Sets the locales only if not set in the query parameters.<p> * * @param locales the locales to set */ public void setLocales(List<Locale> locales) { m_textSearchFields = new ArrayList<String>(); if ((locales == null) || locales.isEmpty()) { m_textSearchFields.add(CmsSearchField.FIELD_TEXT); if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(CmsSearchField.FIELD_CONTENT_LOCALES + ":")) { removeFilterQuery(fq); } } } } else { List<String> localeStrings = new ArrayList<String>(); for (Locale locale : locales) { localeStrings.add(locale.toString()); if (!m_textSearchFields.contains("text") && !OpenCms.getLocaleManager().getAvailableLocales().contains(locale)) { // if the locale is not configured in the opencms-system.xml // there will be no localized text fields, so take the general one m_textSearchFields.add("text"); } else { m_textSearchFields.add("text_" + locale); } } addFilterQuery(CmsSearchField.FIELD_CONTENT_LOCALES, localeStrings, false, false); } if (m_text != null) { setText(m_text); } } /** * Sets the locales only if not set in the query parameters.<p> * * @param locales the locales to set */ public void setLocales(Locale... 
locales) { setLocales(Arrays.asList(locales)); } /** * @see org.apache.solr.client.solrj.SolrQuery#setRequestHandler(java.lang.String) */ @Override public SolrQuery setRequestHandler(String qt) { SolrQuery q = super.setRequestHandler(qt); if (m_text != null) { setText(m_text); } return q; } /** * Sets the resource types only if not set in the query parameters.<p> * * @param resourceTypes the resourceTypes to set */ public void setResourceTypes(List<String> resourceTypes) { if ((resourceTypes != null) && !resourceTypes.isEmpty()) { addFilterQuery(CmsSearchField.FIELD_TYPE, resourceTypes, false, false); } } /** * Sets the resource types only if not set in the query parameters.<p> * * @param resourceTypes the resourceTypes to set */ public void setResourceTypes(String... resourceTypes) { setResourceTypes(Arrays.asList(resourceTypes)); } /** * Sets the requested return fields, but ensures that at least the 'path', 'type', 'id' and 'solr_id' * fields are part of the returned field list.<p> * * @param returnFields the really requested return fields. * * @see CommonParams#FL */ public void setReturnFields(String returnFields) { ensureReturnFields(new String[] {returnFields}); } /** * Sets the search roots only if not set as query parameter.<p> * * @param searchRoots the searchRoots to set */ public void setSearchRoots(List<String> searchRoots) { if ((searchRoots != null) && !searchRoots.isEmpty()) { addFilterQuery(CmsSearchField.FIELD_PARENT_FOLDERS, searchRoots, false, true); } } /** * Sets the search roots only if not set as query parameter.<p> * * @param searchRoots the searchRoots to set */ public void setSearchRoots(String... searchRoots) { setSearchRoots(Arrays.asList(searchRoots)); } /** * Sets the return fields 'fl' to a predefined set that does not contain content specific fields.<p> * * @param structureQuery <code>true</code> to return only structural fields */ public void setStructureQuery(boolean structureQuery) { if (structureQuery) { setFields(STRUCTURE_FIELDS); } } /** * Sets the text.<p> * * @param text the text to set */ public void setText(String text) { m_text = text; if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(text)) { setQuery(createTextQuery(text)); } } /** * Sets the textSearchFields.<p> * * @param textSearchFields the textSearchFields to set */ public void setTextSearchFields(List<String> textSearchFields) { m_textSearchFields = textSearchFields; if (m_text != null) { setText(m_text); } } /** * Sets the textSearchFields.<p> * * @param textSearchFields the textSearchFields to set */ public void setTextSearchFields(String... textSearchFields) { setTextSearchFields(Arrays.asList(textSearchFields)); } /** * @see org.apache.solr.common.params.ModifiableSolrParams#toString() */ @Override public String toString() { return CmsEncoder.decode(super.toString()); } /** * Creates a filter query on the given field name.<p> * * @param fieldName the field name to create a filter query on * @param vals the values that should match for the given field * @param all <code>true</code> to combine the given values with 'AND', <code>false</code> for 'OR' * @param useQuotes <code>true</code> to surround the given values with double quotes, <code>false</code> otherwise * * @return a filter query String e.g. 
<code>fq=fieldname:val1</code> */ private String createFilterQuery(String fieldName, List<String> vals, boolean all, boolean useQuotes) { String filterQuery = null; if (vals != null) { if (vals.size() == 1) { if (useQuotes) { filterQuery = fieldName + ":" + "\"" + vals.get(0) + "\""; } else { filterQuery = fieldName + ":" + vals.get(0); } } else if (vals.size() > 1) { filterQuery = fieldName + ":("; for (int j = 0; j < vals.size(); j++) { String val; if (useQuotes) { val = "\"" + vals.get(j) + "\""; } else { val = vals.get(j); } filterQuery += val; if (vals.size() > (j + 1)) { if (all) { filterQuery += " AND "; } else { filterQuery += " OR "; } } } filterQuery += ")"; } } return filterQuery; } /** * Creates an OR combined 'q' parameter.<p> * * @param text the query string. * * @return returns the 'q' parameter */ private String createTextQuery(String text) { if (m_textSearchFields.isEmpty()) { m_textSearchFields.add(CmsSearchField.FIELD_TEXT); } String q = "{!q.op=OR type=" + getRequestHandler() + " qf="; boolean first = true; for (String textField : m_textSearchFields) { if (!first) { q += " "; } q += textField; first = false; } q += "}" + text; return q; } /** * Ensures that expired and not yet released resources are not returned by default.<p> */ private void ensureExpiration() { boolean expirationDateSet = false; boolean releaseDateSet = false; if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(CmsSearchField.FIELD_DATE_EXPIRED + ":")) { expirationDateSet = true; } if (fq.startsWith(CmsSearchField.FIELD_DATE_RELEASED + ":")) { releaseDateSet = true; } } } if (!expirationDateSet) { addFilterQuery(CmsSearchField.FIELD_DATE_EXPIRED + ":[NOW TO *]"); } if (!releaseDateSet) { addFilterQuery(CmsSearchField.FIELD_DATE_RELEASED + ":[* TO NOW]"); } } /** * Ensures that at least 'path', 'type', 'id' and 'solr_id' are part of the returned field list.<p> * * @see CommonParams#FL */ private void ensureReturnFields() { ensureReturnFields(getParams(CommonParams.FL)); } /** * Ensures that at least 'path', 'type', 'id' and 'solr_id' are part of the returned field list.<p> * * @param requestedReturnFields the requested return fields.
* * @see CommonParams#FL */ private void ensureReturnFields(String[] requestedReturnFields) { if ((requestedReturnFields != null) && (requestedReturnFields.length > 0)) { List<String> result = new ArrayList<String>(); for (String field : requestedReturnFields) { String commasep = field.replaceAll(" ", ","); List<String> list = CmsStringUtil.splitAsList(commasep, ','); if (!list.contains("*")) { for (String reqField : CmsStringUtil.splitAsList(MINIMUM_FIELDS, ",")) { if (!list.contains(reqField)) { list.add(reqField); } } } result.addAll(list); } setParam(CommonParams.FL, CmsStringUtil.arrayAsString(result.toArray(new String[0]), ",")); } } /** * Removes those filter queries that restrict the fields used in the given filter query Strings.<p> * * Searches in the given Strings for a ":", then takes the field name part * and removes the already set filter queries that match the same field name.<p> * * @param fqs the filter query Strings in the format <code>fq=fieldname:value</code> that should be removed */ private void removeFilterQueries(String[] fqs) { // iterate over the given filter queries to remove for (String fq : fqs) { int idx = fq.indexOf(':'); if (idx != -1) { // get the field name of the fq to remove String fieldName = fq.substring(0, idx); // iterate over the already existing fqs of the solr query if (getFilterQueries() != null) { for (String sfq : getFilterQueries()) { if (sfq.startsWith(fieldName + ":")) { // there exists a filter query for exactly the same field, remove it removeFilterQuery(sfq); } } } } } } /** * Removes the given filter queries, if already set, and then adds them again.<p> * * @param fqs the filter queries to replace */ private void replaceFilterQueries(String[] fqs) { removeFilterQueries(fqs); addFilterQuery(fqs); } }
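The commit below adds setReturnFields(String); a minimal usage sketch of the merging behavior (the requested field "Title" is illustrative, and the field names in the comments assume the usual CmsSearchField constants):

CmsSolrQuery query = new CmsSolrQuery();
query.setText("flower");
// ensureReturnFields(...) merges MINIMUM_FIELDS into the requested list, so a
// result document can always be resolved back to a VFS resource:
query.setReturnFields("Title");
// the effective 'fl' parameter is now e.g.: Title,path,type,solr_id,id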
src/org/opencms/search/solr/CmsSolrQuery.java
/* * File : $Source$ * Date : $Date$ * Version: $Revision$ * * This library is part of OpenCms - * the Open Source Content Management System * * Copyright (C) 2002 - 2008 Alkacon Software (http://www.alkacon.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about Alkacon Software, please see the * company website: http://www.alkacon.com * * For further information about OpenCms, please see the * project website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.opencms.search.solr; import org.opencms.file.CmsObject; import org.opencms.file.CmsPropertyDefinition; import org.opencms.i18n.CmsEncoder; import org.opencms.main.OpenCms; import org.opencms.search.fields.CmsSearchField; import org.opencms.util.CmsPair; import org.opencms.util.CmsRequestUtil; import org.opencms.util.CmsStringUtil; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.common.params.CommonParams; /** * A Solr search query.<p> */ public class CmsSolrQuery extends SolrQuery { /** A constant to add the score field to the result documents. */ public static final String ALL_RETURN_FIELDS = "*,score"; /** The default facet date gap. */ public static final String DEFAULT_FACET_DATE_GAP = "+1DAY"; /** The default query. */ public static final String DEFAULT_QUERY = "*:*"; /** The query type. */ public static final String DEFAULT_QUERY_TYPE = "edismax"; /** The default search result count. */ public static final Integer DEFAULT_ROWS = new Integer(10); /** A constant to add the score field to the result documents. */ public static final String MINIMUM_FIELDS = CmsSearchField.FIELD_PATH + "," + CmsSearchField.FIELD_TYPE + "," + CmsSearchField.FIELD_SOLR_ID + "," + CmsSearchField.FIELD_ID; /** A constant to add the score field to the result documents. */ public static final String STRUCTURE_FIELDS = CmsSearchField.FIELD_PATH + "," + CmsSearchField.FIELD_TYPE + "," + CmsSearchField.FIELD_ID + "," + CmsSearchField.FIELD_CATEGORY + "," + CmsSearchField.FIELD_DATE_CONTENT + "," + CmsSearchField.FIELD_DATE_CREATED + "," + CmsSearchField.FIELD_DATE_EXPIRED + "," + CmsSearchField.FIELD_DATE_LASTMODIFIED + "," + CmsSearchField.FIELD_DATE_RELEASED + "," + CmsSearchField.FIELD_SUFFIX + "," + CmsSearchField.FIELD_DEPENDENCY_TYPE + "," + CmsSearchField.FIELD_DESCRIPTION + "," + CmsPropertyDefinition.PROPERTY_TITLE + CmsSearchField.FIELD_DYNAMIC_PROPERTIES + "," + CmsSearchField.FIELD_RESOURCE_LOCALES + "," + CmsSearchField.FIELD_CONTENT_LOCALES + "," + CmsSearchField.FIELD_SCORE + "," + CmsSearchField.FIELD_PARENT_FOLDERS; /** The serial version UID. 
*/ private static final long serialVersionUID = -2387357736597627703L; /** The facet date gap to use for date facets. */ private String m_facetDateGap = DEFAULT_FACET_DATE_GAP; /** Ignore expiration flag. */ private boolean m_ignoreExpiration; /** The parameters given by the 'query string'. */ private Map<String, String[]> m_queryParameters = new HashMap<String, String[]>(); /** The search words. */ private String m_text; /** The name of the field to search the text in. */ private List<String> m_textSearchFields = new ArrayList<String>(); /** * Default constructor.<p> */ public CmsSolrQuery() { this(null, null); } /** * Public constructor.<p> * * @param cms the current OpenCms context * @param queryParams the Solr query parameters */ public CmsSolrQuery(CmsObject cms, Map<String, String[]> queryParams) { setQuery(DEFAULT_QUERY); setFields(ALL_RETURN_FIELDS); setRequestHandler(DEFAULT_QUERY_TYPE); setRows(DEFAULT_ROWS); // set the values from the request context if (cms != null) { setLocales(Collections.singletonList(cms.getRequestContext().getLocale())); setSearchRoots(Collections.singletonList(cms.getRequestContext().getSiteRoot() + "/")); } if (queryParams != null) { m_queryParameters = queryParams; } ensureParameters(); ensureReturnFields(); ensureExpiration(); } /** * Returns the resource type if only one is set as filter query.<p> * * @param fqs the field queries to check * * @return the type or <code>null</code> */ public static String getResourceType(String[] fqs) { String ret = null; int count = 0; if (fqs != null) { for (String fq : fqs) { if (fq.startsWith(CmsSearchField.FIELD_TYPE + ":")) { String val = fq.substring((CmsSearchField.FIELD_TYPE + ":").length()); val = val.replaceAll("\"", ""); if (OpenCms.getResourceManager().hasResourceType(val)) { count++; ret = val; } } } } return (count == 1) ? 
ret : null; } /** * Creates and adds a filter query.<p> * * @param fieldName the field name to create a filter query on * @param vals the values that should match for the given field * @param all <code>true</code> to combine the given values with 'AND', <code>false</code> for 'OR' * @param useQuotes <code>true</code> to surround the given values with double quotes, <code>false</code> otherwise */ public void addFilterQuery(String fieldName, List<String> vals, boolean all, boolean useQuotes) { if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(fieldName + ":")) { removeFilterQuery(fq); } } } addFilterQuery(createFilterQuery(fieldName, vals, all, useQuotes)); } /** * Adds the given fields/orders to the existing sort fields.<p> * * @param sortFields the sortFields to set */ public void addSortFieldOrders(Map<String, ORDER> sortFields) { if ((sortFields != null) && !sortFields.isEmpty()) { // add the sort fields to the query for (Map.Entry<String, ORDER> entry : sortFields.entrySet()) { addSort(entry.getKey(), entry.getValue()); } } } /** * @see java.lang.Object#clone() */ @Override public CmsSolrQuery clone() { CmsSolrQuery sq = new CmsSolrQuery(null, CmsRequestUtil.createParameterMap(toString())); sq.m_ignoreExpiration = m_ignoreExpiration; return sq; } /** * Ensures that the initial request parameters will overwrite the member values.<p> * * You can initialize the query with an HTTP request parameter then make some method calls * and finally re-ensure that the initial request parameters will overwrite the changes * made in the meanwhile.<p> */ public void ensureParameters() { // overwrite already set values with values from query String if ((m_queryParameters != null) && !m_queryParameters.isEmpty()) { for (Map.Entry<String, String[]> entry : m_queryParameters.entrySet()) { if (!entry.getKey().equals(CommonParams.FQ)) { // add or replace all parameters from the query String setParam(entry.getKey(), entry.getValue()); } else { // special handling for filter queries replaceFilterQueries(entry.getValue()); } } } } /** * Removes the expiration flag. */ public void removeExpiration() { if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(CmsSearchField.FIELD_DATE_EXPIRED + ":") || fq.startsWith(CmsSearchField.FIELD_DATE_RELEASED + ":")) { removeFilterQuery(fq); } } } m_ignoreExpiration = true; } /** * Sets the categories only if not set in the query parameters.<p> * * @param categories the categories to set */ public void setCategories(List<String> categories) { if ((categories != null) && !categories.isEmpty()) { addFilterQuery(CmsSearchField.FIELD_CATEGORY + CmsSearchField.FIELD_DYNAMIC_EXACT, categories, true, true); } } /** * Sets the categories only if not set in the query parameters.<p> * * @param categories the categories to set */ public void setCategories(String... 
categories) { setCategories(Arrays.asList(categories)); } /** * Sets date ranges.<p> * * This call will overwrite all existing date ranges for the given keys (name of the date facet field).<p> * * The parameter Map uses as:<p> * <ul> * <li><code>keys: </code>Solr field name {@link org.opencms.search.fields.CmsSearchField} and * <li><code>values: </code> pairs with min date as first and max date as second {@link org.opencms.util.CmsPair} * </ul> * Alternatively you can use Solr standard query syntax like:<p> * <ul> * <li><code>+created:[* TO NOW]</code> * <li><code>+lastmodified:[' + date + ' TO NOW]</code> * </ul> * whereby date is Solr formatted: * {@link org.opencms.search.CmsSearchUtil#getDateAsIso8601(Date)} * <p> * * @param dateRanges the ranges map with field name as key and a CmsPair with min date as first and max date as second */ public void setDateRanges(Map<String, CmsPair<Date, Date>> dateRanges) { if ((dateRanges != null) && !dateRanges.isEmpty()) { // remove the date ranges for (Map.Entry<String, CmsPair<Date, Date>> entry : dateRanges.entrySet()) { removeFacetField(entry.getKey()); } // add the date ranges for (Map.Entry<String, CmsPair<Date, Date>> entry : dateRanges.entrySet()) { addDateRangeFacet( entry.getKey(), entry.getValue().getFirst(), entry.getValue().getSecond(), m_facetDateGap); } } } /** * Sets the facetDateGap.<p> * * @param facetDateGap the facetDateGap to set */ public void setFacetDateGap(String facetDateGap) { m_facetDateGap = facetDateGap; } /** * Sets the highlightFields.<p> * * @param highlightFields the highlightFields to set */ public void setHighlightFields(List<String> highlightFields) { setParam("hl.fl", CmsStringUtil.listAsString(highlightFields, ",")); } /** * Sets the highlightFields.<p> * * @param highlightFields the highlightFields to set */ public void setHighlightFields(String... highlightFields) { setParam("hl.fl", CmsStringUtil.arrayAsString(highlightFields, ",")); } /** * Sets the locales only if not set in the query parameters.<p> * * @param locales the locales to set */ public void setLocales(List<Locale> locales) { m_textSearchFields = new ArrayList<String>(); if ((locales == null) || locales.isEmpty()) { m_textSearchFields.add(CmsSearchField.FIELD_TEXT); if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(CmsSearchField.FIELD_CONTENT_LOCALES + ":")) { removeFilterQuery(fq); } } } } else { List<String> localeStrings = new ArrayList<String>(); for (Locale locale : locales) { localeStrings.add(locale.toString()); if (!m_textSearchFields.contains("text") && !OpenCms.getLocaleManager().getAvailableLocales().contains(locale)) { // if the locale is not configured in the opencms-system.xml // there will no localized text fields, so take the general one m_textSearchFields.add("text"); } else { m_textSearchFields.add("text_" + locale); } } addFilterQuery(CmsSearchField.FIELD_CONTENT_LOCALES, localeStrings, false, false); } if (m_text != null) { setText(m_text); } } /** * Sets the locales only if not set in the query parameters.<p> * * @param locales the locales to set */ public void setLocales(Locale... 
locales) { setLocales(Arrays.asList(locales)); } /** * @see org.apache.solr.client.solrj.SolrQuery#setRequestHandler(java.lang.String) */ @Override public SolrQuery setRequestHandler(String qt) { SolrQuery q = super.setRequestHandler(qt); if (m_text != null) { setText(m_text); } return q; } /** * Sets the resource types only if not set in the query parameters.<p> * * @param resourceTypes the resourceTypes to set */ public void setResourceTypes(List<String> resourceTypes) { if ((resourceTypes != null) && !resourceTypes.isEmpty()) { addFilterQuery(CmsSearchField.FIELD_TYPE, resourceTypes, false, false); } } /** * Sets the resource types only if not set in the query parameters.<p> * * @param resourceTypes the resourceTypes to set */ public void setResourceTypes(String... resourceTypes) { setResourceTypes(Arrays.asList(resourceTypes)); } /** * Sets the search roots only if not set as query parameter.<p> * * @param searchRoots the searchRoots to set */ public void setSearchRoots(List<String> searchRoots) { if ((searchRoots != null) && !searchRoots.isEmpty()) { addFilterQuery(CmsSearchField.FIELD_PARENT_FOLDERS, searchRoots, false, true); } } /** * Sets the search roots only if not set as query parameter.<p> * * @param searchRoots the searchRoots to set */ public void setSearchRoots(String... searchRoots) { setSearchRoots(Arrays.asList(searchRoots)); } /** * Sets the return fields 'fl' to a predefined set that does not contain content specific fields.<p> * * @param structureQuery the <code>true</code> to return only structural fields */ public void setStructureQuery(boolean structureQuery) { if (structureQuery) { setFields(STRUCTURE_FIELDS); } } /** * Sets the text.<p> * * @param text the text to set */ public void setText(String text) { m_text = text; if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(text)) { setQuery(createTextQuery(text)); } } /** * Sets the textSearchFields.<p> * * @param textSearchFields the textSearchFields to set */ public void setTextSearchFields(List<String> textSearchFields) { m_textSearchFields = textSearchFields; if (m_text != null) { setText(m_text); } } /** * Sets the textSearchFields.<p> * * @param textSearchFields the textSearchFields to set */ public void setTextSearchFields(String... textSearchFields) { setTextSearchFields(Arrays.asList(textSearchFields)); } /** * @see org.apache.solr.common.params.ModifiableSolrParams#toString() */ @Override public String toString() { return CmsEncoder.decode(super.toString()); } /** * Creates a filter query on the given field name.<p> * * Creates and adds a filter query.<p> * * @param fieldName the field name to create a filter query on * @param vals the values that should match for the given field * @param all <code>true</code> to combine the given values with 'AND', <code>false</code> for 'OR' * @param useQuotes <code>true</code> to surround the given values with double quotes, <code>false</code> otherwise * * @return a filter query String e.g. 
<code>fq=fieldname:val1</code> */ private String createFilterQuery(String fieldName, List<String> vals, boolean all, boolean useQuotes) { String filterQuery = null; if ((vals != null)) { if (vals.size() == 1) { if (useQuotes) { filterQuery = fieldName + ":" + "\"" + vals.get(0) + "\""; } else { filterQuery = fieldName + ":" + vals.get(0); } } else if (vals.size() > 1) { filterQuery = fieldName + ":("; for (int j = 0; j < vals.size(); j++) { String val; if (useQuotes) { val = "\"" + vals.get(j) + "\""; } else { val = vals.get(j); } filterQuery += val; if (vals.size() > (j + 1)) { if (all) { filterQuery += " AND "; } else { filterQuery += " OR "; } } } filterQuery += ")"; } } return filterQuery; } /** * Creates a OR combined 'q' parameter.<p> * * @param text * * @return returns the 'q' parameter */ private String createTextQuery(String text) { if (m_textSearchFields.isEmpty()) { m_textSearchFields.add(CmsSearchField.FIELD_TEXT); } String q = "{!q.op=OR type=" + getRequestHandler() + " qf="; boolean first = true; for (String textField : m_textSearchFields) { if (!first) { q += " "; } q += textField; } q += "}" + text; return q; } /** * Ensures that expired and not yet released resources are not returned by default.<p> */ private void ensureExpiration() { boolean expirationDateSet = false; boolean releaseDateSet = false; if (getFilterQueries() != null) { for (String fq : getFilterQueries()) { if (fq.startsWith(CmsSearchField.FIELD_DATE_EXPIRED + ":")) { expirationDateSet = true; } if (fq.startsWith(CmsSearchField.FIELD_DATE_RELEASED + ":")) { releaseDateSet = true; } } } if (!expirationDateSet) { addFilterQuery(CmsSearchField.FIELD_DATE_EXPIRED + ":[NOW TO *]"); } if (!releaseDateSet) { addFilterQuery(CmsSearchField.FIELD_DATE_RELEASED + ":[* TO NOW]"); } } /** * Ensures that at least the 'path' and the 'type' are part of the fields returned field list.<p> * * @see CommonParams#FL */ private void ensureReturnFields() { String[] fl = getParams(CommonParams.FL); if ((fl != null) && (fl.length > 0)) { List<String> result = new ArrayList<String>(); for (String field : fl) { String commasep = field.replaceAll(" ", ","); List<String> list = CmsStringUtil.splitAsList(commasep, ','); if (!list.contains("*")) { for (String reqField : CmsStringUtil.splitAsList(MINIMUM_FIELDS, ",")) { if (!list.contains(reqField)) { list.add(reqField); } } } result.addAll(list); } setParam(CommonParams.FL, CmsStringUtil.arrayAsString(result.toArray(new String[0]), ",")); } } /** * Removes those filter queries that restrict the fields used in the given filter query Strings.<p> * * Searches in the given Strings for a ":", then takes the field name part * and removes the already set filter queries queries that are matching the same field name.<p> * * @param fqs the filter query Strings in the format <code>fq=fieldname:value</code> that should be removed */ private void removeFilterQueries(String[] fqs) { // iterate over the given filter queries to remove for (String fq : fqs) { int idx = fq.indexOf(':'); if (idx != -1) { // get the field name of the fq to remove String fieldName = fq.substring(0, idx); // iterate over the fqs of the already existing fqs from the solr query if (getFilterQueries() != null) { for (String sfq : getFilterQueries()) { if (sfq.startsWith(fieldName + ":")) { // there exists a filter query for exact the same field, remove it removeFilterQuery(sfq); } } } } } } /** * Removes the given filter queries, if already set and then adds the filter queries again.<p> * * @param fqs the filter queries to remove 
*/ private void replaceFilterQueries(String[] fqs) { removeFilterQueries(fqs); addFilterQuery(fqs); } }
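For orientation, a sketch of the fq strings that createFilterQuery produces through the public setters above; the field names in the comments ("type", "category_exact", "parent-folders") are the typical CmsSearchField defaults and are assumptions here:

CmsSolrQuery query = new CmsSolrQuery();
query.setResourceTypes("binary", "plain");
// OR-combined, unquoted:              fq = type:(binary OR plain)
query.setCategories("events", "news");
// AND-combined, quoted:               fq = category_exact:("events" AND "news")
query.setSearchRoots("/sites/default/");
// a single value needs no parentheses: fq = parent-folders:"/sites/default/"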
CmsSolrQuery: Added method "setReturnFields" that will add the minimally required return fields to the requested ones.
src/org/opencms/search/solr/CmsSolrQuery.java
CmsSolrQuery: Added method "setReturnFields" that will add the minimally required return fields to the requested ones.
Java
lgpl-2.1
fb82b4b5fe69e312c8a13ddbc612a62f6b2e01c5
0
deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3
//$HeadURL: svn+ssh://[email protected]/deegree/deegree3/trunk/deegree-core/deegree-core-rendering-2d/src/main/java/org/deegree/rendering/r2d/se/parser/PostgreSQLWriter.java $ /*---------------------------------------------------------------------------- This file is part of deegree, http://deegree.org/ Copyright (C) 2001-2010 by: - Department of Geography, University of Bonn - and - lat/lon GmbH - This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Contact information: lat/lon GmbH Aennchenstr. 19, 53177 Bonn Germany http://lat-lon.de/ Department of Geography, University of Bonn Prof. Dr. Klaus Greve Postfach 1147, 53001 Bonn Germany http://www.geographie.uni-bonn.de/deegree/ e-mail: [email protected] ----------------------------------------------------------------------------*/ package org.deegree.style.se.parser; import static java.lang.Double.NEGATIVE_INFINITY; import static java.lang.Double.POSITIVE_INFINITY; import static java.sql.Types.DOUBLE; import static java.sql.Types.INTEGER; import static java.sql.Types.VARCHAR; import static org.deegree.commons.utils.ArrayUtils.join; import static org.slf4j.LoggerFactory.getLogger; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Iterator; import java.util.LinkedList; import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import org.deegree.commons.annotations.LoggingNotes; import org.deegree.commons.config.DeegreeWorkspace; import org.deegree.commons.jdbc.ConnectionManager; import org.deegree.commons.jdbc.param.DefaultJDBCParams; import org.deegree.commons.jdbc.param.JDBCParams; import org.deegree.commons.utils.DoublePair; import org.deegree.commons.utils.Triple; import org.deegree.style.se.unevaluated.Style; import org.deegree.style.styling.LineStyling; import org.deegree.style.styling.PointStyling; import org.deegree.style.styling.PolygonStyling; import org.deegree.style.styling.Styling; import org.deegree.style.styling.TextStyling; import org.deegree.style.styling.components.Fill; import org.deegree.style.styling.components.Font; import org.deegree.style.styling.components.Graphic; import org.deegree.style.styling.components.Halo; import org.deegree.style.styling.components.LinePlacement; import org.deegree.style.styling.components.Stroke; import org.slf4j.Logger; /** * <code>PostgreSQLWriter</code> * * @author <a href="mailto:[email protected]">Andreas Schmitz</a> * @author last edited by: $Author: aschmitz $ * * @version $Revision: 30481 $, $Date: 2011-04-18 16:10:34 +0200 (Mon, 18 Apr 2011) $ */ @LoggingNotes(trace = "logs stack traces", info = "logs connection problems with the DB") public 
class PostgreSQLWriter { private static final Logger LOG = getLogger( PostgreSQLWriter.class ); private final String connId; private final String schema; private DeegreeWorkspace workspace; /** * @param connId */ public PostgreSQLWriter( String connId, String schema, DeegreeWorkspace workspace ) { this.connId = connId; this.schema = schema; this.workspace = workspace; } private int write( Connection conn, Graphic graphic ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".graphics (size, rotation, anchorx, anchory, displacementx, displacementy, wellknownname, svg, base64raster, fill_id, stroke_id) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) returning id" ); stmt.setDouble( 1, graphic.size ); stmt.setDouble( 2, graphic.rotation ); stmt.setDouble( 3, graphic.anchorPointX ); stmt.setDouble( 4, graphic.anchorPointY ); stmt.setDouble( 5, graphic.displacementX ); stmt.setDouble( 6, graphic.displacementY ); // maybe a little harsh, but better than mangling it w/ the if/else below stmt.setNull( 7, VARCHAR ); stmt.setNull( 8, VARCHAR ); stmt.setNull( 9, VARCHAR ); stmt.setNull( 10, INTEGER ); stmt.setNull( 11, INTEGER ); if ( graphic.image != null ) { // TODO base64PNG } else if ( graphic.mark != null ) { if ( graphic.mark.shape != null ) { // TODO svg? } else { stmt.setString( 7, graphic.mark.wellKnown.toString() ); } if ( graphic.mark.fill != null ) { stmt.setInt( 10, write( conn, graphic.mark.fill ) ); } if ( graphic.mark.stroke != null ) { stmt.setInt( 11, write( conn, graphic.mark.stroke ) ); } } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Fill fill ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".fills (color, graphic_id) values (?, ?) returning id" ); String hex = Integer.toHexString( fill.color.getRGB() & 0xffffff ); while ( hex.length() < 6 ) { hex = "0" + hex; } stmt.setString( 1, "#" + hex ); if ( fill.graphic != null ) { stmt.setInt( 2, write( conn, fill.graphic ) ); } else { stmt.setNull( 2, INTEGER ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Stroke stroke ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".strokes (color, width, linejoin, linecap, dasharray, dashoffset, stroke_graphic_id, fill_graphic_id, strokegap, strokeinitialgap, positionpercentage) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
returning id" ); String hex = Integer.toHexString( stroke.color.getRGB() & 0xffffff ); while ( hex.length() < 6 ) { hex = "0" + hex; } stmt.setString( 1, "#" + hex ); stmt.setDouble( 2, stroke.width ); if ( stroke.linejoin != null ) { stmt.setString( 3, stroke.linejoin.toString() ); } else { stmt.setNull( 3, VARCHAR ); } if ( stroke.linecap != null ) { stmt.setString( 4, stroke.linecap.toString() ); } else { stmt.setNull( 4, VARCHAR ); } if ( stroke.dasharray != null ) { stmt.setString( 5, join( " ", stroke.dasharray ) ); } else { stmt.setNull( 5, VARCHAR ); } stmt.setDouble( 6, stroke.dashoffset ); if ( stroke.stroke != null ) { stmt.setInt( 7, write( conn, stroke.stroke ) ); } else { stmt.setNull( 7, INTEGER ); } if ( stroke.fill != null ) { stmt.setInt( 8, write( conn, stroke.fill ) ); } else { stmt.setNull( 8, INTEGER ); } stmt.setDouble( 9, stroke.strokeGap ); stmt.setDouble( 10, stroke.strokeInitialGap ); if ( stroke.positionPercentage >= 0 ) { stmt.setDouble( 11, stroke.positionPercentage ); } else { stmt.setNull( 11, DOUBLE ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Font font ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".fonts (family, style, bold, size) values (?, ?, ?, ?) returning id" ); stmt.setString( 1, join( ",", font.fontFamily ) ); stmt.setString( 2, font.fontStyle.toString() ); stmt.setBoolean( 3, font.bold ); stmt.setDouble( 4, font.fontSize ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, LinePlacement lineplacement ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".lineplacements (perpendicularoffset, repeat, initialgap, gap, isaligned, generalizeline) values (?, ?, ?, ?, ?, ?) 
returning id" ); stmt.setDouble( 1, lineplacement.perpendicularOffset ); stmt.setBoolean( 2, lineplacement.repeat ); stmt.setDouble( 3, lineplacement.initialGap ); stmt.setDouble( 4, lineplacement.gap ); stmt.setBoolean( 5, lineplacement.isAligned ); stmt.setBoolean( 6, lineplacement.generalizeLine ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Halo halo ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".halos (fill_id, radius) values (?, ?) returning id" ); if ( halo.fill == null ) { stmt.setNull( 1, INTEGER ); } else { stmt.setInt( 1, write( conn, halo.fill ) ); } stmt.setDouble( 2, halo.radius ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, PointStyling styling ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".points (uom, graphic_id) values (?, ?) returning id" ); stmt.setString( 1, styling.uom.toString() ); if ( styling.graphic != null ) { stmt.setInt( 2, write( conn, styling.graphic ) ); } else { stmt.setNull( 2, INTEGER ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, LineStyling styling ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".lines (uom, stroke_id, perpendicularoffset) values (?, ?, ?) 
returning id" ); stmt.setString( 1, styling.uom.toString() ); if ( styling.stroke != null ) { stmt.setInt( 2, write( conn, styling.stroke ) ); } else { stmt.setNull( 2, INTEGER ); } stmt.setDouble( 3, styling.perpendicularOffset ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, PolygonStyling styling ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".polygons (uom, fill_id, stroke_id, displacementx, displacementy, perpendicularoffset) values (?, ?, ?, ?, ?, ?) returning id" ); stmt.setString( 1, styling.uom.toString() ); if ( styling.fill != null ) { stmt.setInt( 2, write( conn, styling.fill ) ); } else { stmt.setNull( 2, INTEGER ); } if ( styling.stroke != null ) { stmt.setInt( 3, write( conn, styling.stroke ) ); } else { stmt.setNull( 3, INTEGER ); } stmt.setDouble( 4, styling.displacementX ); stmt.setDouble( 5, styling.displacementY ); stmt.setDouble( 6, styling.perpendicularOffset ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, TextStyling styling, String labelexpr ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".texts (labelexpr, uom, font_id, fill_id, rotation, displacementx, displacementy, anchorx, anchory, lineplacement_id, halo_id) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
returning id" ); stmt.setString( 1, labelexpr ); stmt.setString( 2, styling.uom.toString() ); if ( styling.font == null ) { stmt.setNull( 3, INTEGER ); } else { stmt.setInt( 3, write( conn, styling.font ) ); } if ( styling.fill == null ) { stmt.setNull( 4, INTEGER ); } else { stmt.setInt( 4, write( conn, styling.fill ) ); } stmt.setDouble( 5, styling.rotation ); stmt.setDouble( 6, styling.displacementX ); stmt.setDouble( 7, styling.displacementY ); stmt.setDouble( 8, styling.anchorPointX ); stmt.setDouble( 9, styling.anchorPointY ); if ( styling.linePlacement == null ) { stmt.setNull( 10, INTEGER ); } else { stmt.setInt( 10, write( conn, styling.linePlacement ) ); } if ( styling.halo == null ) { stmt.setNull( 11, INTEGER ); } else { stmt.setInt( 11, write( conn, styling.halo ) ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private void write( Styling styling, DoublePair scales, String name, String labelexpr ) { PreparedStatement stmt = null; Connection conn = null; try { ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class ); conn = mgr.get( connId ); conn.setAutoCommit( false ); stmt = conn.prepareStatement( "insert into " + schema + ".styles (type, fk, minscale, maxscale, name) values (?, ?, ?, ?, ?)" ); if ( styling instanceof PointStyling ) { stmt.setString( 1, "POINT" ); stmt.setInt( 2, write( conn, (PointStyling) styling ) ); } else if ( styling instanceof LineStyling ) { stmt.setString( 1, "LINE" ); stmt.setInt( 2, write( conn, (LineStyling) styling ) ); } else if ( styling instanceof PolygonStyling ) { stmt.setString( 1, "POLYGON" ); stmt.setInt( 2, write( conn, (PolygonStyling) styling ) ); } else if ( styling instanceof TextStyling ) { stmt.setString( 1, "TEXT" ); stmt.setInt( 2, write( conn, (TextStyling) styling, labelexpr ) ); } if ( scales != null ) { if ( scales.first != NEGATIVE_INFINITY ) { stmt.setDouble( 3, scales.first ); } else { stmt.setNull( 3, DOUBLE ); } if ( scales.second != POSITIVE_INFINITY ) { stmt.setDouble( 4, scales.second ); } else { stmt.setNull( 4, DOUBLE ); } } if ( name == null ) { stmt.setNull( 5, VARCHAR ); } else { stmt.setString( 5, name ); } stmt.executeUpdate(); conn.commit(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } finally { if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( conn != null ) { try { conn.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } /** * @param style * @param name */ public void write( Style style, String name ) { for ( Triple<LinkedList<Styling>, DoublePair, LinkedList<String>> p : style.getBasesWithScales() ) { Iterator<String> labelexprs = p.third.iterator(); for ( Styling s : p.first ) { write( s, p.second, name == null ? style.getName() : name, labelexprs.hasNext() ? labelexprs.next() : null ); } } } /** * Writes a style as SLD/SE 'blob'. 
* * @param in * @param name * @throws IOException */ public void write( InputStream in, String name ) throws IOException { PreparedStatement stmt = null; Connection conn = null; try { ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class ); conn = mgr.get( connId ); conn.setAutoCommit( false ); stmt = conn.prepareStatement( "insert into styles (sld, name) values (?, ?)" ); StringBuilder sb = new StringBuilder(); String s = null; BufferedReader bin = new BufferedReader( new InputStreamReader( in, "UTF-8" ) ); while ( ( s = bin.readLine() ) != null ) { sb.append( s ).append( "\n" ); } in.close(); stmt.setString( 1, sb.toString() ); if ( name == null ) { stmt.setNull( 2, VARCHAR ); } else { stmt.setString( 2, name ); } stmt.executeUpdate(); conn.commit(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } finally { if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( conn != null ) { try { conn.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } /** * Simple importer for SE files, with hardcoded 'configtool' on localhost. * * @param args * @throws XMLStreamException * @throws FactoryConfigurationError * @throws IOException */ public static void main( String[] args ) throws XMLStreamException, FactoryConfigurationError, IOException { Style style = new SymbologyParser( true ).parse( XMLInputFactory.newInstance().createXMLStreamReader( new FileInputStream( args[0] ) ) ); DeegreeWorkspace workspace = DeegreeWorkspace.getInstance(); ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class ); JDBCParams params = new DefaultJDBCParams( "jdbc:postgresql://localhost/configtool", "postgres", "postgres", false ); mgr.addPool( "configtool", params, workspace ); if ( style.isSimple() ) { new PostgreSQLWriter( "configtool", "schematest", workspace ).write( style, null ); } else { new PostgreSQLWriter( "configtool", "schematest", workspace ).write( new FileInputStream( args[0] ), style.getName() ); } } }
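A minimal usage sketch for the writer above, mirroring its main method (the connection pool "configtool" and schema "schematest" are taken from there; the input path is illustrative):

DeegreeWorkspace workspace = DeegreeWorkspace.getInstance();
Style style = new SymbologyParser( true ).parse(
    XMLInputFactory.newInstance().createXMLStreamReader( new FileInputStream( "style.xml" ) ) );
PostgreSQLWriter writer = new PostgreSQLWriter( "configtool", "schematest", workspace );
if ( style.isSimple() ) {
    // simple styles are decomposed into the component tables (points, lines, polygons, texts)
    writer.write( style, null );
} else {
    // non-simple styles are stored verbatim as an SLD/SE 'blob'
    writer.write( new FileInputStream( "style.xml" ), style.getName() );
}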
deegree-core/deegree-core-style/src/main/java/org/deegree/style/se/parser/PostgreSQLWriter.java
//$HeadURL: svn+ssh://[email protected]/deegree/deegree3/trunk/deegree-core/deegree-core-rendering-2d/src/main/java/org/deegree/rendering/r2d/se/parser/PostgreSQLWriter.java $ /*---------------------------------------------------------------------------- This file is part of deegree, http://deegree.org/ Copyright (C) 2001-2010 by: - Department of Geography, University of Bonn - and - lat/lon GmbH - This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Contact information: lat/lon GmbH Aennchenstr. 19, 53177 Bonn Germany http://lat-lon.de/ Department of Geography, University of Bonn Prof. Dr. Klaus Greve Postfach 1147, 53001 Bonn Germany http://www.geographie.uni-bonn.de/deegree/ e-mail: [email protected] ----------------------------------------------------------------------------*/ package org.deegree.style.se.parser; import static java.lang.Double.NEGATIVE_INFINITY; import static java.lang.Double.POSITIVE_INFINITY; import static java.sql.Types.DOUBLE; import static java.sql.Types.INTEGER; import static java.sql.Types.VARCHAR; import static org.deegree.commons.jdbc.ConnectionManager.getConnection; import static org.deegree.commons.utils.ArrayUtils.join; import static org.slf4j.LoggerFactory.getLogger; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Iterator; import java.util.LinkedList; import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import org.deegree.commons.annotations.LoggingNotes; import org.deegree.commons.config.DeegreeWorkspace; import org.deegree.commons.jdbc.ConnectionManager; import org.deegree.commons.jdbc.param.DefaultJDBCParams; import org.deegree.commons.jdbc.param.JDBCParams; import org.deegree.commons.utils.DoublePair; import org.deegree.commons.utils.Triple; import org.deegree.style.se.unevaluated.Style; import org.deegree.style.styling.LineStyling; import org.deegree.style.styling.PointStyling; import org.deegree.style.styling.PolygonStyling; import org.deegree.style.styling.Styling; import org.deegree.style.styling.TextStyling; import org.deegree.style.styling.components.Fill; import org.deegree.style.styling.components.Font; import org.deegree.style.styling.components.Graphic; import org.deegree.style.styling.components.Halo; import org.deegree.style.styling.components.LinePlacement; import org.deegree.style.styling.components.Stroke; import org.slf4j.Logger; /** * <code>PostgreSQLWriter</code> * * @author <a href="mailto:[email protected]">Andreas Schmitz</a> * @author last edited by: $Author: aschmitz $ * * @version $Revision: 30481 $, $Date: 2011-04-18 16:10:34 +0200 (Mon, 18 Apr 2011) $ */ @LoggingNotes(trace = 
"logs stack traces", info = "logs connection problems with the DB") public class PostgreSQLWriter { private static final Logger LOG = getLogger( PostgreSQLWriter.class ); private final String connId; private final String schema; private DeegreeWorkspace workspace; /** * @param connId */ public PostgreSQLWriter( String connId, String schema, DeegreeWorkspace workspace ) { this.connId = connId; this.schema = schema; this.workspace = workspace; } private int write( Connection conn, Graphic graphic ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".graphics (size, rotation, anchorx, anchory, displacementx, displacementy, wellknownname, svg, base64raster, fill_id, stroke_id) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) returning id" ); stmt.setDouble( 1, graphic.size ); stmt.setDouble( 2, graphic.rotation ); stmt.setDouble( 3, graphic.anchorPointX ); stmt.setDouble( 4, graphic.anchorPointY ); stmt.setDouble( 5, graphic.displacementX ); stmt.setDouble( 6, graphic.displacementY ); // maybe a little harsh, but better than mangling it w/ the if/else below stmt.setNull( 7, VARCHAR ); stmt.setNull( 8, VARCHAR ); stmt.setNull( 9, VARCHAR ); stmt.setNull( 10, INTEGER ); stmt.setNull( 11, INTEGER ); if ( graphic.image != null ) { // TODO base64PNG } else if ( graphic.mark != null ) { if ( graphic.mark.shape != null ) { // TODO svg? } else { stmt.setString( 7, graphic.mark.wellKnown.toString() ); } if ( graphic.mark.fill != null ) { stmt.setInt( 10, write( conn, graphic.mark.fill ) ); } if ( graphic.mark.stroke != null ) { stmt.setInt( 11, write( conn, graphic.mark.stroke ) ); } } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Fill fill ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".fills (color, graphic_id) values (?, ?) returning id" ); String hex = Integer.toHexString( fill.color.getRGB() & 0xffffff ); while ( hex.length() < 6 ) { hex = "0" + hex; } stmt.setString( 1, "#" + hex ); if ( fill.graphic != null ) { stmt.setInt( 2, write( conn, fill.graphic ) ); } else { stmt.setNull( 2, INTEGER ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Stroke stroke ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".strokes (color, width, linejoin, linecap, dasharray, dashoffset, stroke_graphic_id, fill_graphic_id, strokegap, strokeinitialgap, positionpercentage) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
returning id" ); String hex = Integer.toHexString( stroke.color.getRGB() & 0xffffff ); while ( hex.length() < 6 ) { hex = "0" + hex; } stmt.setString( 1, "#" + hex ); stmt.setDouble( 2, stroke.width ); if ( stroke.linejoin != null ) { stmt.setString( 3, stroke.linejoin.toString() ); } else { stmt.setNull( 3, VARCHAR ); } if ( stroke.linecap != null ) { stmt.setString( 4, stroke.linecap.toString() ); } else { stmt.setNull( 4, VARCHAR ); } if ( stroke.dasharray != null ) { stmt.setString( 5, join( " ", stroke.dasharray ) ); } else { stmt.setNull( 5, VARCHAR ); } stmt.setDouble( 6, stroke.dashoffset ); if ( stroke.stroke != null ) { stmt.setInt( 7, write( conn, stroke.stroke ) ); } else { stmt.setNull( 7, INTEGER ); } if ( stroke.fill != null ) { stmt.setInt( 8, write( conn, stroke.fill ) ); } else { stmt.setNull( 8, INTEGER ); } stmt.setDouble( 9, stroke.strokeGap ); stmt.setDouble( 10, stroke.strokeInitialGap ); if ( stroke.positionPercentage >= 0 ) { stmt.setDouble( 11, stroke.positionPercentage ); } else { stmt.setNull( 11, DOUBLE ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Font font ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".fonts (family, style, bold, size) values (?, ?, ?, ?) returning id" ); stmt.setString( 1, join( ",", font.fontFamily ) ); stmt.setString( 2, font.fontStyle.toString() ); stmt.setBoolean( 3, font.bold ); stmt.setDouble( 4, font.fontSize ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, LinePlacement lineplacement ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".lineplacements (perpendicularoffset, repeat, initialgap, gap, isaligned, generalizeline) values (?, ?, ?, ?, ?, ?) 
returning id" ); stmt.setDouble( 1, lineplacement.perpendicularOffset ); stmt.setBoolean( 2, lineplacement.repeat ); stmt.setDouble( 3, lineplacement.initialGap ); stmt.setDouble( 4, lineplacement.gap ); stmt.setBoolean( 5, lineplacement.isAligned ); stmt.setBoolean( 6, lineplacement.generalizeLine ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, Halo halo ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".halos (fill_id, radius) values (?, ?) returning id" ); if ( halo.fill == null ) { stmt.setNull( 1, INTEGER ); } else { stmt.setInt( 1, write( conn, halo.fill ) ); } stmt.setDouble( 2, halo.radius ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, PointStyling styling ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".points (uom, graphic_id) values (?, ?) returning id" ); stmt.setString( 1, styling.uom.toString() ); if ( styling.graphic != null ) { stmt.setInt( 2, write( conn, styling.graphic ) ); } else { stmt.setNull( 2, INTEGER ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, LineStyling styling ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".lines (uom, stroke_id, perpendicularoffset) values (?, ?, ?) 
returning id" ); stmt.setString( 1, styling.uom.toString() ); if ( styling.stroke != null ) { stmt.setInt( 2, write( conn, styling.stroke ) ); } else { stmt.setNull( 2, INTEGER ); } stmt.setDouble( 3, styling.perpendicularOffset ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, PolygonStyling styling ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".polygons (uom, fill_id, stroke_id, displacementx, displacementy, perpendicularoffset) values (?, ?, ?, ?, ?, ?) returning id" ); stmt.setString( 1, styling.uom.toString() ); if ( styling.fill != null ) { stmt.setInt( 2, write( conn, styling.fill ) ); } else { stmt.setNull( 2, INTEGER ); } if ( styling.stroke != null ) { stmt.setInt( 3, write( conn, styling.stroke ) ); } else { stmt.setNull( 3, INTEGER ); } stmt.setDouble( 4, styling.displacementX ); stmt.setDouble( 5, styling.displacementY ); stmt.setDouble( 6, styling.perpendicularOffset ); rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private int write( Connection conn, TextStyling styling, String labelexpr ) throws SQLException { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement( "insert into " + schema + ".texts (labelexpr, uom, font_id, fill_id, rotation, displacementx, displacementy, anchorx, anchory, lineplacement_id, halo_id) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
returning id" ); stmt.setString( 1, labelexpr ); stmt.setString( 2, styling.uom.toString() ); if ( styling.font == null ) { stmt.setNull( 3, INTEGER ); } else { stmt.setInt( 3, write( conn, styling.font ) ); } if ( styling.fill == null ) { stmt.setNull( 4, INTEGER ); } else { stmt.setInt( 4, write( conn, styling.fill ) ); } stmt.setDouble( 5, styling.rotation ); stmt.setDouble( 6, styling.displacementX ); stmt.setDouble( 7, styling.displacementY ); stmt.setDouble( 8, styling.anchorPointX ); stmt.setDouble( 9, styling.anchorPointY ); if ( styling.linePlacement == null ) { stmt.setNull( 10, INTEGER ); } else { stmt.setInt( 10, write( conn, styling.linePlacement ) ); } if ( styling.halo == null ) { stmt.setNull( 11, INTEGER ); } else { stmt.setInt( 11, write( conn, styling.halo ) ); } rs = stmt.executeQuery(); if ( rs.next() ) { return rs.getInt( 1 ); } return -1; } finally { if ( rs != null ) { try { rs.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } private void write( Styling styling, DoublePair scales, String name, String labelexpr ) { PreparedStatement stmt = null; Connection conn = null; try { ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class ); conn = mgr.get( connId ); conn.setAutoCommit( false ); stmt = conn.prepareStatement( "insert into " + schema + ".styles (type, fk, minscale, maxscale, name) values (?, ?, ?, ?, ?)" ); if ( styling instanceof PointStyling ) { stmt.setString( 1, "POINT" ); stmt.setInt( 2, write( conn, (PointStyling) styling ) ); } else if ( styling instanceof LineStyling ) { stmt.setString( 1, "LINE" ); stmt.setInt( 2, write( conn, (LineStyling) styling ) ); } else if ( styling instanceof PolygonStyling ) { stmt.setString( 1, "POLYGON" ); stmt.setInt( 2, write( conn, (PolygonStyling) styling ) ); } else if ( styling instanceof TextStyling ) { stmt.setString( 1, "TEXT" ); stmt.setInt( 2, write( conn, (TextStyling) styling, labelexpr ) ); } if ( scales != null ) { if ( scales.first != NEGATIVE_INFINITY ) { stmt.setDouble( 3, scales.first ); } else { stmt.setNull( 3, DOUBLE ); } if ( scales.second != POSITIVE_INFINITY ) { stmt.setDouble( 4, scales.second ); } else { stmt.setNull( 4, DOUBLE ); } } if ( name == null ) { stmt.setNull( 5, VARCHAR ); } else { stmt.setString( 5, name ); } stmt.executeUpdate(); conn.commit(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } finally { if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( conn != null ) { try { conn.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } /** * @param style * @param name */ public void write( Style style, String name ) { for ( Triple<LinkedList<Styling>, DoublePair, LinkedList<String>> p : style.getBasesWithScales() ) { Iterator<String> labelexprs = p.third.iterator(); for ( Styling s : p.first ) { write( s, p.second, name == null ? style.getName() : name, labelexprs.hasNext() ? labelexprs.next() : null ); } } } /** * Writes a style as SLD/SE 'blob'. 
* * @param in * @param name * @throws IOException */ public void write( InputStream in, String name ) throws IOException { PreparedStatement stmt = null; Connection conn = null; try { ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class ); conn = mgr.get( connId ); conn.setAutoCommit( false ); stmt = conn.prepareStatement( "insert into styles (sld, name) values (?, ?)" ); StringBuilder sb = new StringBuilder(); String s = null; BufferedReader bin = new BufferedReader( new InputStreamReader( in, "UTF-8" ) ); while ( ( s = bin.readLine() ) != null ) { sb.append( s ).append( "\n" ); } in.close(); stmt.setString( 1, sb.toString() ); if ( name == null ) { stmt.setNull( 2, VARCHAR ); } else { stmt.setString( 2, name ); } stmt.executeUpdate(); conn.commit(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } finally { if ( stmt != null ) { try { stmt.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } if ( conn != null ) { try { conn.close(); } catch ( SQLException e ) { LOG.info( "Unable to write style to DB: '{}'.", e.getLocalizedMessage() ); LOG.trace( "Stack trace:", e ); } } } } /** * Simple importer for SE files, with hardcoded 'configtool' on localhost. * * @param args * @throws XMLStreamException * @throws FactoryConfigurationError * @throws IOException */ public static void main( String[] args ) throws XMLStreamException, FactoryConfigurationError, IOException { Style style = new SymbologyParser( true ).parse( XMLInputFactory.newInstance().createXMLStreamReader( new FileInputStream( args[0] ) ) ); DeegreeWorkspace workspace = DeegreeWorkspace.getInstance(); ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class ); JDBCParams params = new DefaultJDBCParams( "jdbc:postgresql://localhost/configtool", "postgres", "postgres", false ); mgr.addPool( "configtool", params, workspace ); ConnectionManager.addConnection( "configtool", "jdbc:postgresql://localhost/configtool", "postgres", "", 5, 20 ); if ( style.isSimple() ) { new PostgreSQLWriter( "configtool", "schematest", workspace ).write( style, null ); } else { new PostgreSQLWriter( "configtool", "schematest", workspace ).write( new FileInputStream( args[0] ), style.getName() ); } } }
removed static addConnection call
deegree-core/deegree-core-style/src/main/java/org/deegree/style/se/parser/PostgreSQLWriter.java
removed static addConnection call
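Every write(...) method in the PostgreSQLWriter record above repeats the same close-in-finally boilerplate around an "insert ... returning id" round trip. On Java 7+ the same pattern fits into try-with-resources, which closes the ResultSet and PreparedStatement automatically; a minimal sketch under assumed names (the schema parameter and the halos table mirror the code above, insertHalo itself is hypothetical):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

class ReturningIdSketch {
    // Hypothetical helper mirroring write( Connection, Halo ) in the record above.
    static int insertHalo( Connection conn, String schema, Integer fillId, double radius )
                            throws SQLException {
        String sql = "insert into " + schema + ".halos (fill_id, radius) values (?, ?) returning id";
        // Both resources are closed automatically, even when an exception is thrown.
        try ( PreparedStatement stmt = conn.prepareStatement( sql ) ) {
            if ( fillId == null ) {
                stmt.setNull( 1, Types.INTEGER );
            } else {
                stmt.setInt( 1, fillId );
            }
            stmt.setDouble( 2, radius );
            try ( ResultSet rs = stmt.executeQuery() ) {
                return rs.next() ? rs.getInt( 1 ) : -1; // -1 means "no id returned", as above
            }
        }
    }
}

The -1 fallback for "no id returned" is kept from the original; only the cleanup plumbing changes.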
Java
unlicense
380ac3beff3e61644f4d5bb7de0f68220dc496f6
0
typohh/GTPRest
package typo.gtp; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties; import typo.gtp.ChannelAPI.ChannelException; public class MimicKGS { public static void main( String[] pArg ) throws IOException, ChannelException { Properties prop = new Properties(); String fileName = "properties.ini"; if( pArg.length > 0 ) { fileName = pArg[0]; } FileInputStream input = new FileInputStream( fileName ); prop.load( input ); input.close(); String engine=prop.getProperty( "engine" ); long id=Long.parseLong( prop.getProperty( "id" , "0" ).trim() ); boolean blitz=Boolean.parseBoolean( prop.getProperty( "blitz" , "true" ).trim() ); boolean fast=Boolean.parseBoolean( prop.getProperty( "fast" , "true" ).trim() ); double lag=Double.parseDouble( prop.getProperty( "lag" , "3" ).trim() ); CommandLineGTP gtp = new CommandLineGTP( engine , new String[] { "boardsize 19", "komi 7.5" } ); WebSocketEngine wse = new WebSocketEngine( gtp , id , blitz , fast , lag ); wse.start(); } }
src/typo/gtp/MimicKGS.java
package typo.gtp; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties; import typo.gtp.ChannelAPI.ChannelException; public class MimicKGS { public static void main( String[] pArg ) throws IOException, ChannelException { Properties prop = new Properties(); String fileName = "properties.ini"; if( pArg.length > 0 ) { fileName = pArg[0]; } FileInputStream input = new FileInputStream( fileName ); prop.load( input ); input.close(); String engine=prop.getProperty( "engine" ); long id=Long.parseLong( prop.getProperty( "id" , "0" ).trim() ); boolean blitz=Boolean.parseBoolean( prop.getProperty( "blitz" , "true" ).trim() ); boolean fast=Boolean.parseBoolean( prop.getProperty( "fast" , "true" ).trim() ); CommandLineGTP gtp = new CommandLineGTP( engine , new String[] { "boardsize 19", "komi 7.5" } ); WebSocketEngine wse = new WebSocketEngine( gtp , id , blitz , fast ); wse.start(); } }
stress
src/typo/gtp/MimicKGS.java
stress
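The MimicKGS commit above ("stress") threads a new lag property from properties.ini into WebSocketEngine, parsing it with Double.parseDouble( prop.getProperty( "lag" , "3" ).trim() ). That idiom throws NumberFormatException on a malformed value; a small sketch of a tolerant helper, assuming only java.util.Properties (the lag key and the default of 3 come from the record, getDouble is hypothetical):

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class PropertiesSketch {
    // Hypothetical helper: parse a double property, falling back to a default
    // when the key is missing or the value is malformed.
    static double getDouble( Properties prop, String key, double fallback ) {
        String raw = prop.getProperty( key );
        if( raw == null ) {
            return fallback;
        }
        try {
            return Double.parseDouble( raw.trim() );
        } catch( NumberFormatException e ) {
            return fallback;
        }
    }

    public static void main( String[] pArg ) throws IOException {
        Properties prop = new Properties();
        try( FileInputStream input = new FileInputStream( "properties.ini" ) ) {
            prop.load( input ); // the stream closes even if load() throws
        }
        double lag = getDouble( prop, "lag", 3 ); // same key and default as the commit
        System.out.println( "lag = " + lag );
    }
}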
Java
apache-2.0
51ecf4614728e6811305046929705ed8e5443fe4
0
doerfli/hacked,doerfli/hacked,doerfli/hacked
package li.doerf.hacked; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import androidx.test.espresso.NoMatchingViewException; import androidx.test.filters.LargeTest; import androidx.test.rule.ActivityTestRule; import androidx.test.runner.AndroidJUnit4; import li.doerf.hacked.activities.MainActivity; import static androidx.test.espresso.Espresso.onView; import static androidx.test.espresso.action.ViewActions.click; import static androidx.test.espresso.action.ViewActions.longClick; import static androidx.test.espresso.action.ViewActions.typeText; import static androidx.test.espresso.assertion.ViewAssertions.matches; import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed; import static androidx.test.espresso.matcher.ViewMatchers.withId; import static androidx.test.espresso.matcher.ViewMatchers.withText; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.not; @RunWith(AndroidJUnit4.class) @LargeTest public class SmokeTest { static final String ACCOUNT_NAME = "aaa"; @Rule public ActivityTestRule<MainActivity> mActivityRule = new ActivityTestRule<>(MainActivity.class); @Test public void addAccountAaaWithLeaks() { removeAccountIfExists(); onView(withId(R.id.action_add_account)).perform(click()); onView(withId(R.id.account)).perform(typeText(ACCOUNT_NAME)); onView(withText("Add")).perform(click()); try { Thread.sleep(10000); } catch (InterruptedException e) { e.printStackTrace(); } onView(withId(R.id.last_checked)).check(matches(not(withText("yyyy/mm/dd hh:mm")))); onView(withId(R.id.last_checked)).check(matches(not(withText("-")))); onView(withId(R.id.breach_state)).check(matches(withText(containsString("breaches")))); } private void removeAccountIfExists() { try { onView(withText(ACCOUNT_NAME)).check(matches(isDisplayed())); onView(withText(ACCOUNT_NAME)).perform(longClick()); onView(withText("OK")).perform(click()); } catch (NoMatchingViewException e) { //view not displayed - nothing to do } } }
app/src/androidTest/java/li/doerf/hacked/SmokeTest.java
package li.doerf.hacked; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import androidx.test.espresso.NoMatchingViewException; import androidx.test.filters.LargeTest; import androidx.test.rule.ActivityTestRule; import androidx.test.runner.AndroidJUnit4; import li.doerf.hacked.activities.MainActivity; import static androidx.test.espresso.Espresso.onView; import static androidx.test.espresso.action.ViewActions.click; import static androidx.test.espresso.action.ViewActions.longClick; import static androidx.test.espresso.action.ViewActions.typeText; import static androidx.test.espresso.assertion.ViewAssertions.matches; import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed; import static androidx.test.espresso.matcher.ViewMatchers.withId; import static androidx.test.espresso.matcher.ViewMatchers.withText; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.not; @RunWith(AndroidJUnit4.class) @LargeTest public class SmokeTest { static final String ACCOUNT_NAME = "aaa"; @Rule public ActivityTestRule<MainActivity> mActivityRule = new ActivityTestRule<>(MainActivity.class); @Test public void addAccountAaaWithLeaks() { removeAccountIfExists(); onView(withId(R.id.action_add_account)).perform(click()); onView(withId(R.id.account)).perform(typeText(ACCOUNT_NAME)); onView(withText("Add")).perform(click()); onView(withId(R.id.last_checked)).check(matches(withText("-"))); try { Thread.sleep(10000); } catch (InterruptedException e) { e.printStackTrace(); } onView(withId(R.id.last_checked)).check(matches(not(withText("yyyy/mm/dd hh:mm")))); onView(withId(R.id.last_checked)).check(matches(not(withText("-")))); onView(withId(R.id.breach_state)).check(matches(withText(containsString("breaches")))); } private void removeAccountIfExists() { try { onView(withText(ACCOUNT_NAME)).check(matches(isDisplayed())); onView(withText(ACCOUNT_NAME)).perform(longClick()); onView(withText("OK")).perform(click()); } catch (NoMatchingViewException e) { //view not displayed - nothing to do } } }
run smoke test in ci
app/src/androidTest/java/li/doerf/hacked/SmokeTest.java
run smoke test in ci
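The SmokeTest above parks the test thread with Thread.sleep(10000) while the breach check runs, which is both slow and flaky in CI. Espresso's idling resources let the framework wait exactly until background work finishes; a sketch assuming the app is willing to expose a counter that the check increments and decrements (the BreachCheckHooks class and its wiring are hypothetical, while CountingIdlingResource and IdlingRegistry are standard androidx.test classes):

import androidx.test.espresso.IdlingRegistry;
import androidx.test.espresso.idling.CountingIdlingResource;

public class BreachCheckHooks {
    // Hypothetical production hook: incremented when a breach check starts,
    // decremented when it completes.
    public static final CountingIdlingResource CHECKS =
            new CountingIdlingResource("breach-checks");
}

// In the test class, instead of Thread.sleep(10000):
//
//   @Before
//   public void registerIdlingResource() {
//       IdlingRegistry.getInstance().register(BreachCheckHooks.CHECKS);
//   }
//
//   @After
//   public void unregisterIdlingResource() {
//       IdlingRegistry.getInstance().unregister(BreachCheckHooks.CHECKS);
//   }
//
// Espresso then delays each onView(...) until CHECKS.isIdleNow() is true,
// so the assertions run as soon as the check finishes and no later.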
Java
apache-2.0
a353d7de84a198e25dd475857ac0ccd788cb8dac
0
mini2Dx/mini2Dx,mini2Dx/mini2Dx
/******************************************************************************* * Copyright 2019 See AUTHORS file * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.mini2Dx.tiled; import org.mini2Dx.core.Mdx; import org.mini2Dx.core.assets.AssetDescriptor; import org.mini2Dx.core.assets.AssetManager; import org.mini2Dx.core.files.FileHandle; import org.mini2Dx.core.files.FileType; import org.mini2Dx.core.graphics.Color; import org.mini2Dx.core.graphics.TextureAtlas; import org.mini2Dx.core.serialization.GameDataSerializable; import org.mini2Dx.core.serialization.GameDataSerializableUtils; import org.mini2Dx.gdx.math.MathUtils; import org.mini2Dx.gdx.utils.Array; import org.mini2Dx.gdx.utils.IntSet; import org.mini2Dx.gdx.utils.ObjectMap; import org.mini2Dx.gdx.utils.ObjectSet; import org.mini2Dx.tiled.exception.TiledException; import org.mini2Dx.tiled.exception.TiledParsingException; import org.mini2Dx.tiled.renderer.AnimatedTileRenderer; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.Objects; /** * Data parsed from a TMX file created with Tiled */ public class TiledMapData implements TiledParserListener, GameDataSerializable { public static long MAX_TILESET_LOAD_TIMESLICE_MILLIS = 2L; static final ObjectSet<String> OBJECT_TEMPLATE_TILESET_SOURCES = new ObjectSet<String>(); protected FileHandle fileHandle; protected final Array<Tileset> tilesets = new Array<Tileset>(true, 2, Tileset.class); protected final IntSet tilesetGids = new IntSet(); protected final Array<Layer> layers = new Array<Layer>(true, 2, Layer.class); protected final ObjectMap<String, TiledObjectGroup> objectGroups = new ObjectMap<String, TiledObjectGroup>(8); private String orientationValue; private Orientation orientation; private StaggerAxis staggerAxis; private StaggerIndex staggerIndex; private int width, height, tileWidth, tileHeight, pixelWidth, pixelHeight, sideLength; private Color backgroundColor; private Array<Tile> animatedTiles; private ObjectMap<String, String> properties; /** * * @param fileHandle * @throws TiledException */ public TiledMapData(FileHandle fileHandle) { this(new TiledParser(), fileHandle); } /** * * @param tiledParser * @param fileHandle * @throws TiledException */ public TiledMapData(TiledParser tiledParser, FileHandle fileHandle) { super(); this.fileHandle = fileHandle; tiledParser.setListener(this); try { tiledParser.parseTmx(fileHandle); } catch (IOException e) { tiledParser.setListener(null); throw new TiledParsingException(e); } tiledParser.setListener(null); } private TiledMapData(boolean load, FileHandle fileHandle) { super(); this.fileHandle = fileHandle; } public static TiledMapData fromInputStream(DataInputStream inputStream) throws IOException { final String path = inputStream.readUTF(); final FileType fileType = FileType.valueOf(inputStream.readUTF()); final TiledMapData mapData; switch (fileType) { default: case INTERNAL: mapData = new 
TiledMapData(false, Mdx.files.internal(path)); break; case EXTERNAL: mapData = new TiledMapData(false, Mdx.files.external(path)); break; case LOCAL: mapData = new TiledMapData(false, Mdx.files.local(path)); break; } mapData.readData(inputStream); return mapData; } @Override public void writeData(DataOutputStream outputStream) throws IOException { outputStream.writeUTF(fileHandle.path()); outputStream.writeUTF(fileHandle.type().name()); outputStream.writeInt(OBJECT_TEMPLATE_TILESET_SOURCES.size); for(String objectTemplateTilesetSources : OBJECT_TEMPLATE_TILESET_SOURCES) { outputStream.writeUTF(objectTemplateTilesetSources); } GameDataSerializableUtils.writeString(orientationValue, outputStream); GameDataSerializableUtils.writeString(orientation == null ? null : orientation.name(), outputStream); GameDataSerializableUtils.writeString(staggerAxis == null ? null : staggerAxis.name(), outputStream); GameDataSerializableUtils.writeString(staggerIndex == null ? null : staggerIndex.name(), outputStream); outputStream.writeInt(width); outputStream.writeInt(height); outputStream.writeInt(tileWidth); outputStream.writeInt(tileHeight); outputStream.writeInt(pixelWidth); outputStream.writeInt(pixelHeight); outputStream.writeInt(sideLength); outputStream.writeBoolean(backgroundColor != null); if(backgroundColor != null) { outputStream.writeFloat(backgroundColor.rf()); outputStream.writeFloat(backgroundColor.gf()); outputStream.writeFloat(backgroundColor.bf()); outputStream.writeFloat(backgroundColor.af()); } outputStream.writeInt(properties == null ? 0 : properties.size); if(properties != null && properties.size > 0) { for(String key : properties.keys()) { outputStream.writeUTF(key); GameDataSerializableUtils.writeString(properties.get(key, null), outputStream); } } outputStream.writeInt(tilesets.size); for(int i = 0; i < tilesets.size; i++) { tilesets.get(i).writeData(outputStream); } outputStream.writeInt(layers.size); for(int i = 0; i < layers.size; i++) { layers.get(i).writeData(outputStream); } } @Override public void readData(DataInputStream inputStream) throws IOException { final int totalTemplateTilesetSources = inputStream.readInt(); for(int i = 0; i < totalTemplateTilesetSources; i++) { OBJECT_TEMPLATE_TILESET_SOURCES.add(inputStream.readUTF()); } orientationValue = GameDataSerializableUtils.readString(inputStream); final String orientation = GameDataSerializableUtils.readString(inputStream); if(orientation != null) { this.orientation = Orientation.valueOf(orientation); } final String staggerAxis = GameDataSerializableUtils.readString(inputStream); if(staggerAxis != null) { this.staggerAxis = StaggerAxis.valueOf(staggerAxis); } final String staggerIndex = GameDataSerializableUtils.readString(inputStream); if(staggerIndex != null) { this.staggerIndex = StaggerIndex.valueOf(staggerIndex); } width = inputStream.readInt(); height = inputStream.readInt(); tileWidth = inputStream.readInt(); tileHeight = inputStream.readInt(); pixelWidth = inputStream.readInt(); pixelHeight = inputStream.readInt(); sideLength = inputStream.readInt(); final boolean backgroundColorExists = inputStream.readBoolean(); if(backgroundColorExists) { final float r = inputStream.readFloat(); final float g = inputStream.readFloat(); final float b = inputStream.readFloat(); final float a = inputStream.readFloat(); backgroundColor = Mdx.graphics.newColor(r, g, b, a); } final int totalProperties = inputStream.readInt(); if(totalProperties > 0) { properties = new ObjectMap<>(); for(int i = 0; i < totalProperties; i++) { final 
String key = inputStream.readUTF(); final String value = GameDataSerializableUtils.readString(inputStream); properties.put(key, value); } } final int totalTilesets = inputStream.readInt(); for(int i = 0; i < totalTilesets; i++) { onTilesetParsed(Tileset.fromInputStream(this, inputStream)); } final int totalLayers = inputStream.readInt(); for(int i = 0; i < totalLayers; i++) { final Layer layer = Layer.fromInputStream(inputStream); switch (layer.getLayerType()) { default: case TILE: onTileLayerParsed((TileLayer) layer); break; case OBJECT: onObjectGroupParsed((TiledObjectGroup) layer); break; case IMAGE: break; case GROUP: onGroupLayerParsed((GroupLayer) layer); break; } } } public Array<AssetDescriptor> getDependencies() { Array<AssetDescriptor> dependencies = new Array<AssetDescriptor>(); for (int i = 0; i < tilesets.size; i++) { dependencies.addAll(tilesets.get(i).getDependencies(fileHandle)); } return dependencies; } /** * Loads all {@link Tileset} textures for this map if they are not already loaded<br> * Note: Depending on the texture sizes, this may need to be called over several frames * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures() { return loadTilesetTextures(true); } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param assetManager The {@link AssetManager} to use * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(AssetManager assetManager) { return loadTilesetTextures(assetManager, true); } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param textureAtlas The {@link TextureAtlas} to load textures from * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(TextureAtlas textureAtlas) { return loadTilesetTextures(textureAtlas, true); } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param loadObjectTemplateTilesets True if tilesets used by object templates should be loaded * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(boolean loadObjectTemplateTilesets) { final long startTime = System.currentTimeMillis(); for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if(tileset.isTextureLoaded()) { continue; } if(!loadObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } tileset.loadTexture(fileHandle); if(System.currentTimeMillis() - startTime >= MAX_TILESET_LOAD_TIMESLICE_MILLIS) { return false; } } return true; } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param loadObjectTemplateTilesets True if tilesets used by object templates should be loaded * @param assetManager The {@link AssetManager} to use * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(AssetManager assetManager, boolean loadObjectTemplateTilesets) { final long startTime = System.currentTimeMillis(); 
for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if(tileset.isTextureLoaded()) { continue; } if(!loadObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } tileset.loadTexture(assetManager, fileHandle); if(System.currentTimeMillis() - startTime >= MAX_TILESET_LOAD_TIMESLICE_MILLIS) { return false; } } return true; } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param loadObjectTemplateTilesets True if tilesets used by object templates should be loaded * @param textureAtlas The {@link TextureAtlas} to load textures from * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(TextureAtlas textureAtlas, boolean loadObjectTemplateTilesets) { final long startTime = System.currentTimeMillis(); for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if(tileset.isTextureLoaded()) { continue; } if(!loadObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } tileset.loadTexture(textureAtlas); if(System.currentTimeMillis() - startTime >= MAX_TILESET_LOAD_TIMESLICE_MILLIS) { return false; } } return true; } @Override public void onBeginParsing(String orientation, String staggerAxis, String staggerIndex, Color backgroundColor, int width, int height, int tileWidth, int tileHeight, int sideLength) { this.orientationValue = orientation; try { this.orientation = Orientation.valueOf(orientation.toUpperCase()); } catch (Exception e) { this.orientation = Orientation.UNKNOWN; } if (backgroundColor != null) { this.backgroundColor = backgroundColor; } if (staggerAxis != null) { this.staggerAxis = StaggerAxis.valueOf(staggerAxis.toUpperCase()); if (sideLength < 0) { switch (this.staggerAxis) { case X: sideLength = tileWidth / 2; break; case Y: default: sideLength = tileHeight / 2; break; } } } if (staggerIndex != null) { this.staggerIndex = StaggerIndex.valueOf(staggerIndex.toUpperCase()); } this.width = width; this.height = height; this.tileWidth = tileWidth; this.tileHeight = tileHeight; this.sideLength = sideLength; switch (this.orientation) { case HEXAGONAL: switch (this.staggerAxis) { case X: this.pixelWidth = MathUtils.round(((tileWidth * 0.75f) * width) + (tileWidth * 0.25f)); this.pixelHeight = MathUtils.round((tileHeight * height) + (tileHeight * 0.5f)); break; case Y: default: this.pixelWidth = MathUtils.round((tileWidth * width) + (tileWidth * 0.5f)); this.pixelHeight = MathUtils.round(((tileHeight * 0.75f) * height) + (tileHeight * 0.25f)); break; } break; case ISOMETRIC_STAGGERED: break; case ISOMETRIC: case ORTHOGONAL: case UNKNOWN: default: this.pixelWidth = width * tileWidth; this.pixelHeight = height * tileHeight; break; } } /** * Returns if the map contains the specified property * * @param propertyName * The property name to search for * @return True if the map contains the property */ public boolean containsProperty(String propertyName) { if (properties == null) return false; return properties.containsKey(propertyName); } /** * Returns the value of a specified property * * @param propertyName * The property name to search for * @return Null if there is no such property */ public String getProperty(String propertyName) { if (properties == null) return null; return properties.get(propertyName); } /** * Sets the value of a specified 
property * * @param propertyName * The property name to set the value for * @param value * The value of the property to set */ public void setProperty(String propertyName, String value) { if (properties == null) properties = new ObjectMap<String, String>(); properties.put(propertyName, value); } @Override public void onMapPropertyParsed(String propertyName, String value) { setProperty(propertyName, value); } @Override public void onTilePropertiesParsed(Tile tile) { if (tile.getTileRenderer() == null) { return; } if (tile.getTileRenderer() instanceof AnimatedTileRenderer) { if (animatedTiles == null) { animatedTiles = new Array<Tile>(true,8, Tile.class); } animatedTiles.add(tile); } } @Override public void onTilesetParsed(Tileset parsedTileset) { if(tilesetGids.add(parsedTileset.getFirstGid())) { tilesets.add(parsedTileset); } } @Override public void onTileLayerParsed(TileLayer parsedLayer) { parsedLayer.setIndex(layers.size); layers.add(parsedLayer); } @Override public void onObjectGroupParsed(TiledObjectGroup parsedObjectGroup) { parsedObjectGroup.setIndex(layers.size); layers.add(parsedObjectGroup); objectGroups.put(parsedObjectGroup.getName(), parsedObjectGroup); } @Override public void onGroupLayerParsed(GroupLayer parsedLayer) { parsedLayer.setIndex(layers.size); layers.add(parsedLayer); } @Override public void onObjectTemplateParsed(TiledObjectTemplate parsedObjectTemplate) { OBJECT_TEMPLATE_TILESET_SOURCES.add(parsedObjectTemplate.getTileset().getSourceInternalUuid()); } /** * Returns the {@link TileLayer} with the given name * * @param name The name to search for * @return Null if there is no such {@link TileLayer} */ public TileLayer getTileLayer(String name) { return getTileLayer(name, true); } /** * Returns the {@link TileLayer} with the given name * * @param name The name to search for * @param recursive False if only the root's immediate child layers should be searched (ignoring descendants) * @return Null if there is no such {@link TileLayer} */ public TileLayer getTileLayer(String name, boolean recursive) { return getTileLayer(layers, name, recursive); } /** * Returns the {@link TileLayer} with the given name * * @param layers The layers to search through * @param name The name to search for * @param recursive False if only the immediate layers should be searched (ignoring descendants) * @return Null if there is no such {@link TileLayer} */ public static TileLayer getTileLayer(final Array<Layer> layers, String name, boolean recursive) { for (Layer layer : layers) { if (layer.getLayerType().equals(LayerType.TILE)) { if (layer.getName().compareTo(name) == 0) { return (TileLayer) layer; } } else if(recursive && layer.getLayerType().equals(LayerType.GROUP)) { GroupLayer groupLayer = (GroupLayer) layer; TileLayer result = getTileLayer(groupLayer.layers, name, recursive); if(result != null) { return result; } } } return null; } /** * Returns the {@link TileLayer} at the given index * * @param index * The index of the layer * @return Null if the index is out of bounds */ public TileLayer getTileLayer(int index) { if (index < 0 || index >= layers.size) { return null; } return (TileLayer) layers.get(index); } /** * Returns the {@link TiledObjectGroup} with the given name * * @param name * The name to search for * @return Null if there is no such {@link TiledObjectGroup} */ public TiledObjectGroup getObjectGroup(String name) { return getObjectGroup(name, true); } /** * Returns the {@link TiledObjectGroup} with the given name * * @param name The name to search for * @param recursive 
False if only the immediate layers should be searched (ignoring descendants) * @return Null if there is no such {@link TiledObjectGroup} */ public TiledObjectGroup getObjectGroup(String name, boolean recursive) { return getObjectGroup(layers, objectGroups, name, recursive); } /** * Returns the {@link TiledObjectGroup} with the given name * * @param layers The layers to search through * @param objectGroups A map of layer names to object groups * @param name The name to search for * @param recursive False if only the immediate layers should be searched (ignoring descendants) * @return Null if there is no such {@link TiledObjectGroup} */ public static TiledObjectGroup getObjectGroup(final Array<Layer> layers, final ObjectMap<String, TiledObjectGroup> objectGroups, String name, boolean recursive) { TiledObjectGroup result = objectGroups.get(name, null); if(result != null) { return result; } if(!recursive) { return null; } for (Layer layer : layers) { if (!layer.getLayerType().equals(LayerType.GROUP)) { continue; } final GroupLayer groupLayer = (GroupLayer) layer; result = getObjectGroup(groupLayer.layers, groupLayer.objectGroups, name, recursive); if(result != null) { return result; } } return null; } /** * Returns the {@link GroupLayer} with the given name * @param name The name of the layer * @return Null if the layer does not exist */ public GroupLayer getGroupLayer(String name) { return getGroupLayer(name, true); } /** * Returns the {@link GroupLayer} with the given name * @param name The name of the layer * @param recursive False if only the root's immediate child layers should be searched (ignoring descendants) * @return Null if the layer does not exist */ public GroupLayer getGroupLayer(String name, boolean recursive) { return getGroupLayer(layers, name, recursive); } /** * Returns the {@link GroupLayer} with the given name * @param layers The layers to search through * @param name The name of the layer * @param recursive False if only the immediate layers should be searched (ignoring descendants) * @return Null if the layer does not exist */ public static GroupLayer getGroupLayer(final Array<Layer> layers, String name, boolean recursive) { for (Layer layer : layers) { if (!layer.getLayerType().equals(LayerType.GROUP)) { continue; } if (layer.getName().compareTo(name) == 0) { return (GroupLayer) layer; } else if(recursive) { GroupLayer result = getGroupLayer(((GroupLayer) layer).layers, name, recursive); if(result != null) { return result; } } } return null; } /** * Returns the {@link GroupLayer} at the given index * @param index The index of the layer * @return Null if the index is out of bounds */ public GroupLayer getGroupLayer(int index) { if (index < 0 || index >= layers.size) { return null; } return (GroupLayer) layers.get(index); } /** * Returns all the {@link TiledObjectGroup}s in this map * * @return Null if there are no {@link TiledObjectGroup}s */ public Iterable<TiledObjectGroup> getObjectGroups() { if (objectGroups.size == 0) { return null; } return objectGroups.values(); } /** * Returns the index of the {@link TileLayer} or {@link TiledObjectGroup} * with the given name * * @param name * The name to search for * @return -1 if there is no such {@link TileLayer} or * {@link TiledObjectGroup} */ public int getLayerIndex(String name) { for (int i = 0; i < layers.size; i++) { Layer layer = layers.get(i); if (layer.getName().compareTo(name) == 0) { return i; } } return -1; } /** * Returns the index of the {@link TileLayer} or {@link TiledObjectGroup} * with the given name 
ignoring upper/lowercase differences * * @param name * The name to search for * @return -1 if there is no such {@link TileLayer} or * {@link TiledObjectGroup} */ public int getLayerIndexIgnoreCase(String name) { for (int i = 0; i < layers.size; i++) { Layer layer = layers.get(i); if (layer.getName().compareToIgnoreCase(name) == 0) { return i; } } return -1; } /** * Returns the {@link Tile} for the given tile ID * * @param tileId * The tile ID to search for * @return Null if there is no {@link Tile} with the given ID */ public Tile getTile(int tileId) { for (int i = 0; i < tilesets.size; i++) { if (tilesets.get(i).contains(tileId)) { return tilesets.get(i).getTile(tileId); } } return null; } /** * Returns the {@link Tile} at the given coordinate on a specific layer * * @param x * The x coordinate (in tiles) * @param y * The y coordinate (in tiles) * @param layer * The layer index * @return Null if there is no {@link Tile} */ public Tile getTile(int x, int y, int layer) { Layer tiledLayer = layers.get(layer); if (!tiledLayer.getLayerType().equals(LayerType.TILE)) { return null; } return getTile(((TileLayer) tiledLayer).getTileId(x, y)); } /** * Returns the {@link Orientation} of this map * * @return */ public Orientation getOrientation() { return orientation; } /** * Returns the {@link StaggerAxis} of this map * * @return Null if there is no value */ public StaggerAxis getStaggerAxis() { return staggerAxis; } /** * Returns the {@link StaggerIndex} of this map * * @return Null if there is no value */ public StaggerIndex getStaggerIndex() { return staggerIndex; } /** * Returns the stagger side length of this map * * @return -1 if there is no value */ public int getSideLength() { return sideLength; } /** * Returns the width of the map in tiles * * @return */ public int getWidth() { return width; } /** * Returns the height of the map in tiles * * @return */ public int getHeight() { return height; } /** * Returns the width of tiles in pixels * * @return */ public int getTileWidth() { return tileWidth; } /** * Returns the height of tiles in pixels * * @return */ public int getTileHeight() { return tileHeight; } /** * Returns the width of the map in pixels * * @return */ public int getPixelWidth() { return pixelWidth; } /** * Return the height of the map in pixels * * @return */ public int getPixelHeight() { return pixelHeight; } /** * Returns the {@link Tileset}s of this map * * @return An empty list if none have been loaded */ public Array<Tileset> getTilesets() { return tilesets; } /** * Returns the {@link Layer}s of this map * * @return */ public Array<Layer> getLayers() { return layers; } public Array<Tile> getAnimatedTiles() { return animatedTiles; } /** * Returns the total amount of {@link TiledObjectGroup} instances * * @return */ public int getTotalObjectGroups() { return objectGroups.size; } /** * Returns the total amount of {@link Layer} instances * * @return */ public int getTotalLayers() { return layers.size; } /** * Returns the background {@link Color} of the map * * @return null by default */ public Color getBackgroundColor() { return backgroundColor; } /** * Returns if this map contains animated tiles * * @return True if there are animated tiles */ public boolean containsAnimatedTiles() { if (animatedTiles == null) { return false; } return animatedTiles.size > 0; } /** * Returns if the {@link Tileset} images have been loaded * * @return True if they have been loaded */ public boolean isTilesetTexturesLoaded() { return isTilesetTexturesLoaded(false); } /** * Returns if the {@link 
Tileset} images have been loaded * @param ignoreObjectTemplateTilesets True if tilesets referenced by object templates should be ignored * @return True if they have been loaded */ public boolean isTilesetTexturesLoaded(boolean ignoreObjectTemplateTilesets) { for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if (ignoreObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } if (!tileset.isTextureLoaded()) { return false; } } return true; } /** * Returns the {@link FileHandle} for this data * * @return */ public FileHandle getFileHandle() { return fileHandle; } /** * Sets the {@link FileHandle} for this data * @param fileHandle */ public void setFileHandle(FileHandle fileHandle) { this.fileHandle = fileHandle; } /** * Releases any resources used by this TiledMap including tilesets */ public void dispose() { dispose(true); } /** * Releases any resources used by this TiledMap * * @param disposeTilesets * True if tilesets should also be disposed */ public void dispose(boolean disposeTilesets) { if (!disposeTilesets) { return; } for (int i = 0; i < tilesets.size; i++) { tilesets.get(i).dispose(); } tilesets.clear(); for(int i = 0; i < layers.size; i++) { layers.get(i).dispose(); } layers.clear(); } /** * Returns the list of Tilesets loaded via object templates * @return An empty set if no tilesets loaded via templates */ public static ObjectSet<String> getObjectTemplateTilesetSources() { return OBJECT_TEMPLATE_TILESET_SOURCES; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TiledMapData that = (TiledMapData) o; return width == that.width && height == that.height && tileWidth == that.tileWidth && tileHeight == that.tileHeight && pixelWidth == that.pixelWidth && pixelHeight == that.pixelHeight && sideLength == that.sideLength && Objects.equals(tilesets, that.tilesets) && Objects.equals(tilesetGids, that.tilesetGids) && Objects.equals(layers, that.layers) && Objects.equals(orientationValue, that.orientationValue) && orientation == that.orientation && staggerAxis == that.staggerAxis && staggerIndex == that.staggerIndex && Objects.equals(backgroundColor, that.backgroundColor) && Objects.equals(animatedTiles, that.animatedTiles) && Objects.equals(properties, that.properties); } @Override public int hashCode() { return Objects.hash(tilesets, tilesetGids, layers, orientationValue, orientation, staggerAxis, staggerIndex, width, height, tileWidth, tileHeight, pixelWidth, pixelHeight, sideLength, backgroundColor, animatedTiles, properties); } @Override public String toString() { return "TiledMapData{" + "tilesets=" + tilesets + ", layers=" + layers + ", orientationValue='" + orientationValue + '\'' + ", orientation=" + orientation + ", staggerAxis=" + staggerAxis + ", staggerIndex=" + staggerIndex + ", width=" + width + ", height=" + height + ", tileWidth=" + tileWidth + ", tileHeight=" + tileHeight + ", pixelWidth=" + pixelWidth + ", pixelHeight=" + pixelHeight + ", sideLength=" + sideLength + ", backgroundColor=" + backgroundColor + ", properties=" + properties + '}'; } }
tiled/src/main/java/org/mini2Dx/tiled/TiledMapData.java
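The loadTilesetTextures variants in the TiledMapData file above bound per-frame work with a millisecond budget (MAX_TILESET_LOAD_TIMESLICE_MILLIS): they load tilesets until the budget is spent, then return false so the caller retries on the next frame. The same time-slicing idiom in isolation, with a hypothetical Runnable queue standing in for the tileset list:

import java.util.ArrayDeque;
import java.util.Queue;

public class TimeSlicedLoader {
    public static long MAX_LOAD_TIMESLICE_MILLIS = 2L; // same budget as TiledMapData

    private final Queue<Runnable> pending = new ArrayDeque<>();

    public void enqueue(Runnable task) {
        pending.add(task);
    }

    /** Runs queued tasks until the budget is spent. Returns true once the queue is empty. */
    public boolean update() {
        final long startTime = System.currentTimeMillis();
        while (!pending.isEmpty()) {
            pending.poll().run();
            if (System.currentTimeMillis() - startTime >= MAX_LOAD_TIMESLICE_MILLIS) {
                return pending.isEmpty();
            }
        }
        return true;
    }
}

Returning false acts as a "call me again next frame" signal, so a game loop can invoke update() once per frame and keep frame times stable regardless of how many assets remain.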
/******************************************************************************* * Copyright 2019 See AUTHORS file * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.mini2Dx.tiled; import org.mini2Dx.core.Mdx; import org.mini2Dx.core.assets.AssetDescriptor; import org.mini2Dx.core.assets.AssetManager; import org.mini2Dx.core.files.FileHandle; import org.mini2Dx.core.files.FileType; import org.mini2Dx.core.graphics.Color; import org.mini2Dx.core.graphics.TextureAtlas; import org.mini2Dx.core.serialization.GameDataSerializable; import org.mini2Dx.core.serialization.GameDataSerializableUtils; import org.mini2Dx.gdx.math.MathUtils; import org.mini2Dx.gdx.utils.Array; import org.mini2Dx.gdx.utils.IntSet; import org.mini2Dx.gdx.utils.ObjectMap; import org.mini2Dx.gdx.utils.ObjectSet; import org.mini2Dx.tiled.exception.TiledException; import org.mini2Dx.tiled.exception.TiledParsingException; import org.mini2Dx.tiled.renderer.AnimatedTileRenderer; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.Objects; /** * Data parsed from a TMX file created with Tiled */ public class TiledMapData implements TiledParserListener, GameDataSerializable { public static long MAX_TILESET_LOAD_TIMESLICE_MILLIS = 2L; static final ObjectSet<String> OBJECT_TEMPLATE_TILESET_SOURCES = new ObjectSet<String>(); protected FileHandle fileHandle; protected final Array<Tileset> tilesets = new Array<Tileset>(true, 2, Tileset.class); protected final IntSet tilesetGids = new IntSet(); protected final Array<Layer> layers = new Array<Layer>(true, 2, Layer.class); protected final ObjectMap<String, TiledObjectGroup> objectGroups = new ObjectMap<String, TiledObjectGroup>(8); private String orientationValue; private Orientation orientation; private StaggerAxis staggerAxis; private StaggerIndex staggerIndex; private int width, height, tileWidth, tileHeight, pixelWidth, pixelHeight, sideLength; private Color backgroundColor; private Array<Tile> animatedTiles; private ObjectMap<String, String> properties; /** * * @param fileHandle * @throws TiledException */ public TiledMapData(FileHandle fileHandle) { this(new TiledParser(), fileHandle); } /** * * @param tiledParser * @param fileHandle * @throws TiledException */ public TiledMapData(TiledParser tiledParser, FileHandle fileHandle) { super(); this.fileHandle = fileHandle; tiledParser.setListener(this); try { tiledParser.parseTmx(fileHandle); } catch (IOException e) { tiledParser.setListener(null); throw new TiledParsingException(e); } tiledParser.setListener(null); } private TiledMapData(boolean load, FileHandle fileHandle) { super(); this.fileHandle = fileHandle; } public static TiledMapData fromInputStream(DataInputStream inputStream) throws IOException { final String path = inputStream.readUTF(); final FileType fileType = FileType.valueOf(inputStream.readUTF()); final TiledMapData mapData; switch (fileType) { default: case INTERNAL: mapData = new 
TiledMapData(false, Mdx.files.internal(path)); break; case EXTERNAL: mapData = new TiledMapData(false, Mdx.files.external(path)); break; case LOCAL: mapData = new TiledMapData(false, Mdx.files.local(path)); break; } mapData.readData(inputStream); return mapData; } @Override public void writeData(DataOutputStream outputStream) throws IOException { outputStream.writeUTF(fileHandle.path()); outputStream.writeUTF(fileHandle.type().name()); outputStream.writeInt(OBJECT_TEMPLATE_TILESET_SOURCES.size); for(String objectTemplateTilesetSources : OBJECT_TEMPLATE_TILESET_SOURCES) { outputStream.writeUTF(objectTemplateTilesetSources); } GameDataSerializableUtils.writeString(orientationValue, outputStream); GameDataSerializableUtils.writeString(orientation == null ? null : orientation.name(), outputStream); GameDataSerializableUtils.writeString(staggerAxis == null ? null : staggerAxis.name(), outputStream); GameDataSerializableUtils.writeString(staggerIndex == null ? null : staggerIndex.name(), outputStream); outputStream.writeInt(width); outputStream.writeInt(height); outputStream.writeInt(tileWidth); outputStream.writeInt(tileHeight); outputStream.writeInt(pixelWidth); outputStream.writeInt(pixelHeight); outputStream.writeInt(sideLength); outputStream.writeBoolean(backgroundColor != null); if(backgroundColor != null) { outputStream.writeFloat(backgroundColor.rf()); outputStream.writeFloat(backgroundColor.gf()); outputStream.writeFloat(backgroundColor.bf()); outputStream.writeFloat(backgroundColor.af()); } outputStream.writeInt(properties == null ? 0 : properties.size); if(properties != null && properties.size > 0) { for(String key : properties.keys()) { outputStream.writeUTF(key); GameDataSerializableUtils.writeString(properties.get(key, null), outputStream); } } outputStream.writeInt(tilesets.size); for(int i = 0; i < tilesets.size; i++) { tilesets.get(i).writeData(outputStream); } outputStream.writeInt(layers.size); for(int i = 0; i < layers.size; i++) { layers.get(i).writeData(outputStream); } } @Override public void readData(DataInputStream inputStream) throws IOException { final int totalTemplateTilesetSources = inputStream.readInt(); for(int i = 0; i < totalTemplateTilesetSources; i++) { OBJECT_TEMPLATE_TILESET_SOURCES.add(inputStream.readUTF()); } orientationValue = GameDataSerializableUtils.readString(inputStream); final String orientation = GameDataSerializableUtils.readString(inputStream); if(orientation != null) { this.orientation = Orientation.valueOf(orientation); } final String staggerAxis = GameDataSerializableUtils.readString(inputStream); if(staggerAxis != null) { this.staggerAxis = StaggerAxis.valueOf(staggerAxis); } final String staggerIndex = GameDataSerializableUtils.readString(inputStream); if(staggerIndex != null) { this.staggerIndex = StaggerIndex.valueOf(staggerIndex); } width = inputStream.readInt(); height = inputStream.readInt(); tileWidth = inputStream.readInt(); tileHeight = inputStream.readInt(); pixelWidth = inputStream.readInt(); pixelHeight = inputStream.readInt(); sideLength = inputStream.readInt(); final boolean backgroundColorExists = inputStream.readBoolean(); if(backgroundColorExists) { final float r = inputStream.readFloat(); final float g = inputStream.readFloat(); final float b = inputStream.readFloat(); final float a = inputStream.readFloat(); backgroundColor = Mdx.graphics.newColor(r, g, b, a); } final int totalProperties = inputStream.readInt(); if(totalProperties > 0) { properties = new ObjectMap<>(); for(int i = 0; i < totalProperties; i++) { final 
String key = inputStream.readUTF(); final String value = GameDataSerializableUtils.readString(inputStream); properties.put(key, value); } } final int totalTilesets = inputStream.readInt(); for(int i = 0; i < totalTilesets; i++) { onTilesetParsed(Tileset.fromInputStream(this, inputStream)); } final int totalLayers = inputStream.readInt(); for(int i = 0; i < totalLayers; i++) { final Layer layer = Layer.fromInputStream(inputStream); switch (layer.getLayerType()) { default: case TILE: onTileLayerParsed((TileLayer) layer); break; case OBJECT: onObjectGroupParsed((TiledObjectGroup) layer); break; case IMAGE: break; case GROUP: onGroupLayerParsed((GroupLayer) layer); break; } } } public Array<AssetDescriptor> getDependencies() { Array<AssetDescriptor> dependencies = new Array<AssetDescriptor>(); for (int i = 0; i < tilesets.size; i++) { dependencies.addAll(tilesets.get(i).getDependencies(fileHandle)); } return dependencies; } /** * Loads all {@link Tileset} textures for this map if they are not already loaded<br> * Note: Depending on the texture sizes, this may need to be called over several frames * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures() { return loadTilesetTextures(true); } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param assetManager The {@link AssetManager} to use * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(AssetManager assetManager) { return loadTilesetTextures(assetManager, true); } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param textureAtlas The {@link TextureAtlas} to load textures from * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(TextureAtlas textureAtlas) { return loadTilesetTextures(textureAtlas, true); } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param loadObjectTemplateTilesets True if tilesets used by object templates should be loaded * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(boolean loadObjectTemplateTilesets) { final long startTime = System.currentTimeMillis(); for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if(tileset.isTextureLoaded()) { continue; } if(!loadObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } tileset.loadTexture(fileHandle); if(System.currentTimeMillis() - startTime >= MAX_TILESET_LOAD_TIMESLICE_MILLIS) { return false; } } return true; } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param loadObjectTemplateTilesets True if tilesets used by object templates should be loaded * @param assetManager The {@link AssetManager} to use * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(AssetManager assetManager, boolean loadObjectTemplateTilesets) { final long startTime = System.currentTimeMillis(); 
for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if(tileset.isTextureLoaded()) { continue; } if(!loadObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } tileset.loadTexture(assetManager, fileHandle); if(System.currentTimeMillis() - startTime >= MAX_TILESET_LOAD_TIMESLICE_MILLIS) { return false; } } return true; } /** * Loads all {@link Tileset} textures for this map if they are not already loaded * <br>Note: Depending on the texture sizes, this may need to be called over several frames * @param loadObjectTemplateTilesets True if tilesets used by object templates should be loaded * @param textureAtlas The {@link TextureAtlas} to load textures from * @return True if all tilesets + textures have been loaded, otherwise false */ public boolean loadTilesetTextures(TextureAtlas textureAtlas, boolean loadObjectTemplateTilesets) { final long startTime = System.currentTimeMillis(); for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if(tileset.isTextureLoaded()) { continue; } if(!loadObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } tileset.loadTexture(textureAtlas); if(System.currentTimeMillis() - startTime >= MAX_TILESET_LOAD_TIMESLICE_MILLIS) { return false; } } return true; } @Override public void onBeginParsing(String orientation, String staggerAxis, String staggerIndex, Color backgroundColor, int width, int height, int tileWidth, int tileHeight, int sideLength) { this.orientationValue = orientation; try { this.orientation = Orientation.valueOf(orientation.toUpperCase()); } catch (Exception e) { this.orientation = Orientation.UNKNOWN; } if (backgroundColor != null) { this.backgroundColor = backgroundColor; } if (staggerAxis != null) { this.staggerAxis = StaggerAxis.valueOf(staggerAxis.toUpperCase()); if (sideLength < 0) { switch (this.staggerAxis) { case X: sideLength = tileWidth / 2; break; case Y: default: sideLength = tileHeight / 2; break; } } } if (staggerIndex != null) { this.staggerIndex = StaggerIndex.valueOf(staggerIndex.toUpperCase()); } this.width = width; this.height = height; this.tileWidth = tileWidth; this.tileHeight = tileHeight; this.sideLength = sideLength; switch (this.orientation) { case HEXAGONAL: switch (this.staggerAxis) { case X: this.pixelWidth = MathUtils.round(((tileWidth * 0.75f) * width) + (tileWidth * 0.25f)); this.pixelHeight = MathUtils.round((tileHeight * height) + (tileHeight * 0.5f)); break; case Y: default: this.pixelWidth = MathUtils.round((tileWidth * width) + (tileWidth * 0.5f)); this.pixelHeight = MathUtils.round(((tileHeight * 0.75f) * height) + (tileHeight * 0.25f)); break; } break; case ISOMETRIC_STAGGERED: break; case ISOMETRIC: case ORTHOGONAL: case UNKNOWN: default: this.pixelWidth = width * tileWidth; this.pixelHeight = height * tileHeight; break; } } /** * Returns if the map contains the specified property * * @param propertyName * The property name to search for * @return True if the map contains the property */ public boolean containsProperty(String propertyName) { if (properties == null) return false; return properties.containsKey(propertyName); } /** * Returns the value of a specified property * * @param propertyName * The property name to search for * @return Null if there is no such property */ public String getProperty(String propertyName) { if (properties == null) return null; return properties.get(propertyName); } /** * Sets the value of a specified 
property * * @param propertyName * The property name to set the value for * @param value * The value of the property to set */ public void setProperty(String propertyName, String value) { if (properties == null) properties = new ObjectMap<String, String>(); properties.put(propertyName, value); } @Override public void onMapPropertyParsed(String propertyName, String value) { setProperty(propertyName, value); } @Override public void onTilePropertiesParsed(Tile tile) { if (tile.getTileRenderer() == null) { return; } if (tile.getTileRenderer() instanceof AnimatedTileRenderer) { if (animatedTiles == null) { animatedTiles = new Array<Tile>(true,1, Tile.class); } animatedTiles.add(tile); } } @Override public void onTilesetParsed(Tileset parsedTileset) { if(tilesetGids.add(parsedTileset.getFirstGid())) { tilesets.add(parsedTileset); } } @Override public void onTileLayerParsed(TileLayer parsedLayer) { parsedLayer.setIndex(layers.size); layers.add(parsedLayer); } @Override public void onObjectGroupParsed(TiledObjectGroup parsedObjectGroup) { parsedObjectGroup.setIndex(layers.size); layers.add(parsedObjectGroup); objectGroups.put(parsedObjectGroup.getName(), parsedObjectGroup); } @Override public void onGroupLayerParsed(GroupLayer parsedLayer) { parsedLayer.setIndex(layers.size); layers.add(parsedLayer); } @Override public void onObjectTemplateParsed(TiledObjectTemplate parsedObjectTemplate) { OBJECT_TEMPLATE_TILESET_SOURCES.add(parsedObjectTemplate.getTileset().getSourceInternalUuid()); } /** * Returns the {@link TileLayer} with the given name * * @param name The name to search for * @return Null if there is no such {@link TileLayer} */ public TileLayer getTileLayer(String name) { return getTileLayer(name, true); } /** * Returns the {@link TileLayer} with the given name * * @param name The name to search for * @param recursive False if only the root's immediate child layers should be searched (ignoring descendants) * @return Null if there is no such {@link TileLayer} */ public TileLayer getTileLayer(String name, boolean recursive) { return getTileLayer(layers, name, recursive); } /** * Returns the {@link TileLayer} with the given name * * @param layers The layers to search through * @param name The name to search for * @param recursive False if only the immediate layers should be searched (ignoring descendants) * @return Null if there is no such {@link TileLayer} */ public static TileLayer getTileLayer(final Array<Layer> layers, String name, boolean recursive) { for (Layer layer : layers) { if (layer.getLayerType().equals(LayerType.TILE)) { if (layer.getName().compareTo(name) == 0) { return (TileLayer) layer; } } else if(recursive && layer.getLayerType().equals(LayerType.GROUP)) { GroupLayer groupLayer = (GroupLayer) layer; TileLayer result = getTileLayer(groupLayer.layers, name, recursive); if(result != null) { return result; } } } return null; } /** * Returns the {@link TileLayer} at the given index * * @param index * The index of the layer * @return Null if the index is out of bounds */ public TileLayer getTileLayer(int index) { if (index < 0 || index >= layers.size) { return null; } return (TileLayer) layers.get(index); } /** * Returns the {@link TiledObjectGroup} with the given name * * @param name * The name to search for * @return Null if there is no such {@link TiledObjectGroup} */ public TiledObjectGroup getObjectGroup(String name) { return getObjectGroup(name, true); } /** * Returns the {@link TiledObjectGroup} with the given name * * @param name The name to search for * @param recursive 
False if only the immediate layers should be searched (ignoring descendants) * @return Null if there is no such {@link TiledObjectGroup} */ public TiledObjectGroup getObjectGroup(String name, boolean recursive) { return getObjectGroup(layers, objectGroups, name, recursive); } /** * Returns the {@link TiledObjectGroup} with the given name * * @param layers The layers to search through * @param objectGroups A map of layer names to object groups * @param name The name to search for * @param recursive False if only the immediate layers should be searched (ignoring descendants) * @return Null if there is no such {@link TiledObjectGroup} */ public static TiledObjectGroup getObjectGroup(final Array<Layer> layers, final ObjectMap<String, TiledObjectGroup> objectGroups, String name, boolean recursive) { TiledObjectGroup result = objectGroups.get(name, null); if(result != null) { return result; } if(!recursive) { return null; } for (Layer layer : layers) { if (!layer.getLayerType().equals(LayerType.GROUP)) { continue; } final GroupLayer groupLayer = (GroupLayer) layer; result = getObjectGroup(groupLayer.layers, groupLayer.objectGroups, name, recursive); if(result != null) { return result; } } return null; } /** * Returns the {@link GroupLayer} with the given name * @param name The name of the layer * @return Null if the layer does not exist */ public GroupLayer getGroupLayer(String name) { return getGroupLayer(name, true); } /** * Returns the {@link GroupLayer} with the given name * @param name The name of the layer * @param recursive False if only the root's immediate child layers should be searched (ignoring descendants) * @return Null if the layer does not exist */ public GroupLayer getGroupLayer(String name, boolean recursive) { return getGroupLayer(layers, name, recursive); } /** * Returns the {@link GroupLayer} with the given name * @param layers The layers to search through * @param name The name of the layer * @param recursive False if only the immediate layers should be searched (ignoring descendants) * @return Null if the layer does not exist */ public static GroupLayer getGroupLayer(final Array<Layer> layers, String name, boolean recursive) { for (Layer layer : layers) { if (!layer.getLayerType().equals(LayerType.GROUP)) { continue; } if (layer.getName().compareTo(name) == 0) { return (GroupLayer) layer; } else if(recursive) { GroupLayer result = getGroupLayer(((GroupLayer) layer).layers, name, recursive); if(result != null) { return result; } } } return null; } /** * Returns the {@link GroupLayer} at the given index * @param index The index of the layer * @return Null if the index is out of bounds */ public GroupLayer getGroupLayer(int index) { if (index < 0 || index >= layers.size) { return null; } return (GroupLayer) layers.get(index); } /** * Returns all the {@link TiledObjectGroup}s in this map * * @return Null if there are no {@link TiledObjectGroup}s */ public Iterable<TiledObjectGroup> getObjectGroups() { if (objectGroups.size == 0) { return null; } return objectGroups.values(); } /** * Returns the index of the {@link TileLayer} or {@link TiledObjectGroup} * with the given name * * @param name * The name to search for * @return -1 if there is no such {@link TileLayer} or * {@link TiledObjectGroup} */ public int getLayerIndex(String name) { for (int i = 0; i < layers.size; i++) { Layer layer = layers.get(i); if (layer.getName().compareTo(name) == 0) { return i; } } return -1; } /** * Returns the index of the {@link TileLayer} or {@link TiledObjectGroup} * with the given name 
ignoring upper/lowercase differences * * @param name * The name to search for * @return -1 if there is no such {@link TileLayer} or * {@link TiledObjectGroup} */ public int getLayerIndexIgnoreCase(String name) { for (int i = 0; i < layers.size; i++) { Layer layer = layers.get(i); if (layer.getName().compareToIgnoreCase(name) == 0) { return i; } } return -1; } /** * Returns the {@link Tile} for the given tile ID * * @param tileId * The tile ID to search for * @return Null if there is no {@link Tile} with the given ID */ public Tile getTile(int tileId) { for (int i = 0; i < tilesets.size; i++) { if (tilesets.get(i).contains(tileId)) { return tilesets.get(i).getTile(tileId); } } return null; } /** * Returns the {@link Tile} at the given coordinate on a specific layer * * @param x * The x coordinate (in tiles) * @param y * The y coordinate (in tiles) * @param layer * The layer index * @return Null if there is no {@link Tile} */ public Tile getTile(int x, int y, int layer) { Layer tiledLayer = layers.get(layer); if (!tiledLayer.getLayerType().equals(LayerType.TILE)) { return null; } return getTile(((TileLayer) tiledLayer).getTileId(x, y)); } /** * Returns the {@link Orientation} of this map * * @return */ public Orientation getOrientation() { return orientation; } /** * Returns the {@link StaggerAxis} of this map * * @return Null if there is no value */ public StaggerAxis getStaggerAxis() { return staggerAxis; } /** * Returns the {@link StaggerIndex} of this map * * @return Null if there is no value */ public StaggerIndex getStaggerIndex() { return staggerIndex; } /** * Returns the stagger side length of this map * * @return -1 if there is no value */ public int getSideLength() { return sideLength; } /** * Returns the width of the map in tiles * * @return */ public int getWidth() { return width; } /** * Returns the height of the map in tiles * * @return */ public int getHeight() { return height; } /** * Returns the width of tiles in pixels * * @return */ public int getTileWidth() { return tileWidth; } /** * Returns the height of tiles in pixels * * @return */ public int getTileHeight() { return tileHeight; } /** * Returns the width of the map in pixels * * @return */ public int getPixelWidth() { return pixelWidth; } /** * Return the height of the map in pixels * * @return */ public int getPixelHeight() { return pixelHeight; } /** * Returns the {@link Tileset}s of this map * * @return An empty list if none have been loaded */ public Array<Tileset> getTilesets() { return tilesets; } /** * Returns the {@link Layer}s of this map * * @return */ public Array<Layer> getLayers() { return layers; } public Array<Tile> getAnimatedTiles() { return animatedTiles; } /** * Returns the total amount of {@link TiledObjectGroup} instances * * @return */ public int getTotalObjectGroups() { return objectGroups.size; } /** * Returns the total amount of {@link Layer} instances * * @return */ public int getTotalLayers() { return layers.size; } /** * Returns the background {@link Color} of the map * * @return null by default */ public Color getBackgroundColor() { return backgroundColor; } /** * Returns if this map contains animated tiles * * @return True if there are animated tiles */ public boolean containsAnimatedTiles() { if (animatedTiles == null) { return false; } return animatedTiles.size > 0; } /** * Returns if the {@link Tileset} images have been loaded * * @return True if they have been loaded */ public boolean isTilesetTexturesLoaded() { return isTilesetTexturesLoaded(false); } /** * Returns if the {@link 
Tileset} images have been loaded * @param ignoreObjectTemplateTilesets True if tilesets referenced by object templates should be ignored * @return True if they have been loaded */ public boolean isTilesetTexturesLoaded(boolean ignoreObjectTemplateTilesets) { for (int i = 0; i < tilesets.size; i++) { final Tileset tileset = tilesets.get(i); if (ignoreObjectTemplateTilesets && OBJECT_TEMPLATE_TILESET_SOURCES.contains(tileset.getSourceInternalUuid())) { continue; } if (!tileset.isTextureLoaded()) { return false; } } return true; } /** * Returns the {@link FileHandle} for this data * * @return */ public FileHandle getFileHandle() { return fileHandle; } /** * Sets the {@link FileHandle} for this data * @param fileHandle */ public void setFileHandle(FileHandle fileHandle) { this.fileHandle = fileHandle; } /** * Releases any resources used by this TiledMap including tilesets */ public void dispose() { dispose(true); } /** * Releases any resources used by this TiledMap * * @param disposeTilesets * True if tilesets should also be disposed */ public void dispose(boolean disposeTilesets) { if (!disposeTilesets) { return; } for (int i = 0; i < tilesets.size; i++) { tilesets.get(i).dispose(); } tilesets.clear(); for(int i = 0; i < layers.size; i++) { layers.get(i).dispose(); } layers.clear(); } /** * Returns the list of Tilesets loaded via object templates * @return An empty set if no tilesets loaded via templates */ public static ObjectSet<String> getObjectTemplateTilesetSources() { return OBJECT_TEMPLATE_TILESET_SOURCES; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TiledMapData that = (TiledMapData) o; return width == that.width && height == that.height && tileWidth == that.tileWidth && tileHeight == that.tileHeight && pixelWidth == that.pixelWidth && pixelHeight == that.pixelHeight && sideLength == that.sideLength && Objects.equals(tilesets, that.tilesets) && Objects.equals(tilesetGids, that.tilesetGids) && Objects.equals(layers, that.layers) && Objects.equals(orientationValue, that.orientationValue) && orientation == that.orientation && staggerAxis == that.staggerAxis && staggerIndex == that.staggerIndex && Objects.equals(backgroundColor, that.backgroundColor) && Objects.equals(animatedTiles, that.animatedTiles) && Objects.equals(properties, that.properties); } @Override public int hashCode() { return Objects.hash(tilesets, tilesetGids, layers, orientationValue, orientation, staggerAxis, staggerIndex, width, height, tileWidth, tileHeight, pixelWidth, pixelHeight, sideLength, backgroundColor, animatedTiles, properties); } @Override public String toString() { return "TiledMapData{" + "tilesets=" + tilesets + ", layers=" + layers + ", orientationValue='" + orientationValue + '\'' + ", orientation=" + orientation + ", staggerAxis=" + staggerAxis + ", staggerIndex=" + staggerIndex + ", width=" + width + ", height=" + height + ", tileWidth=" + tileWidth + ", tileHeight=" + tileHeight + ", pixelWidth=" + pixelWidth + ", pixelHeight=" + pixelHeight + ", sideLength=" + sideLength + ", backgroundColor=" + backgroundColor + ", properties=" + properties + '}'; } }
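The loadTilesetTextures variants above deliberately return false once MAX_TILESET_LOAD_TIMESLICE_MILLIS is exceeded, so callers are expected to keep invoking them across frames. A minimal sketch of such a loading loop, assuming the mini2Dx/libGDX types this file already uses; the MapLoadingScreen wrapper and its wiring are illustrative, only TiledMapData.loadTilesetTextures(TextureAtlas, boolean) comes from the class above:

// imports omitted: TiledMapData and TextureAtlas depend on the engine version in use
public class MapLoadingScreen {
    private final TiledMapData mapData;   // the class defined above
    private final TextureAtlas atlas;     // atlas holding the tileset images (assumption)
    private boolean texturesReady = false;

    public MapLoadingScreen(TiledMapData mapData, TextureAtlas atlas) {
        this.mapData = mapData;
        this.atlas = atlas;
    }

    // Call once per frame: false means the time slice ran out part-way
    // through the tileset list, so we simply resume on the next frame.
    public void update() {
        if (!texturesReady) {
            texturesReady = mapData.loadTilesetTextures(atlas, true);
        }
    }
}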
Increase initial array size for animated tiles
tiled/src/main/java/org/mini2Dx/tiled/TiledMapData.java
Increase initial array size for animated tiles
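For context: the old code above creates the backing array as new Array<Tile>(true, 1, Tile.class), so each of the first few animated tiles triggers a resize-and-copy. The exact capacity the commit switches to is not visible in this excerpt; the snippet below only sketches the shape of the change, with a made-up capacity of 8:

if (animatedTiles == null) {
    // hypothetical larger starting capacity (the value actually chosen by the
    // commit is not shown here); avoids repeated backing-array growth
    animatedTiles = new Array<Tile>(true, 8, Tile.class);
}
animatedTiles.add(tile);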
Java
apache-2.0
96e5f7e71776e9d612fe31a2eee06445e084dbfa
0
LeifWarner/openid4java,janrain/openid4java,janrain/openid4java,svn2github/openid4java,LeifWarner/openid4java
/* * Copyright 2006-2007 Sxip Identity Corporation */ package org.openid4java.message.ax; import org.openid4java.message.ParameterList; import org.openid4java.message.MessageException; import org.openid4java.message.Parameter; import java.net.URL; import java.net.MalformedURLException; import java.util.*; import org.apache.log4j.Logger; /** * Implements the extension for Attribute Exchange fetch responses. * * @author Marius Scurtescu, Johnny Bufu */ public class FetchResponse extends AxMessage { private static Logger _log = Logger.getLogger(FetchResponse.class); private static final boolean DEBUG = _log.isDebugEnabled(); /** * Constructs a Fetch Response with an empty parameter list. */ protected FetchResponse() { _parameters.set(new Parameter("mode", "fetch_response")); if (DEBUG) _log.debug("Created empty fetch response."); } /** * Constructs a Fetch Response with an empty parameter list. */ public static FetchResponse createFetchResponse() { return new FetchResponse(); } /** * Constructs a FetchResponse from a parameter list. * <p> * The parameter list can be extracted from a received message with the * getExtensionParams method of the Message class, and MUST NOT contain * the "openid.<extension_alias>." prefix. */ protected FetchResponse(ParameterList params) { _parameters = params; } public static FetchResponse createFetchResponse(ParameterList params) throws MessageException { FetchResponse resp = new FetchResponse(params); if (! resp.isValid()) throw new MessageException("Invalid parameters for a fetch response"); if (DEBUG) _log.debug("Created fetch response from parameter list:\n" + params); return resp; } /** * Creates a FetchResponse from a FetchRequest message and the data released * by the user. * * @param req FetchRequest message. * @param userData The userData may be a Map<String alias, String value> * or a Map<String alias, List<String> values>. The attribute values * are provided by the calling application. If a list of values is * specified per attribute, at most n will be sent, where n is the * number of attribute values requested in the FetchRequest. * @return Properly formed FetchResponse. */ public static FetchResponse createFetchResponse(FetchRequest req, Map userData) { FetchResponse resp = new FetchResponse(); // go through each requested attribute Map attributes = req.getAttributes(); for (Iterator i = attributes.keySet().iterator(); i.hasNext(); ) { String alias = (String) i.next(); // find attribute in userData Object value = userData.get(alias); // if the value isn't there, skip over it if (value == null) { continue; } // if the value is a string, add the single attribute to the response if (value instanceof String) { resp.addAttribute(alias, (String) attributes.get(alias), (String)value); } // if the value is a list (of string) iteratively add each attribute to the response else if (value instanceof List) { Iterator values = ((List)value).iterator(); // only send up to the maximum requested number int max = req.getCount(alias); for (int count = 0; count < max && values.hasNext(); count++) { // if the value isn't there, skip over it String val = (String)values.next(); if (val == null) { count--; // disregard this as a value as we are skipping over it continue; } resp.addAttribute(alias, (String) attributes.get(alias), val); } } } return resp; } /** * Adds an attribute to the fetch response. * * @param alias The alias identifier that will be associated * with the attribute type URI. * @param typeUri The attribute type URI.
* @param value The value of the attribute. */ public void addAttribute(String alias, String typeUri, String value) { int count = getCount(alias); String index = ""; switch(count) { case 0: _parameters.set(new Parameter("type." + alias, typeUri)); break; case 1: // rename the existing one _parameters.set(new Parameter("value." + alias + ".1", getParameterValue("value." + alias))); _parameters.removeParameters("value." + alias); index = ".2"; break; default: index = "." +Integer.toString(count + 1); } _parameters.set(new Parameter("value." + alias + index, value)); setCount(alias, ++count); if (DEBUG) _log.debug("Added new attribute to fetch response; type: " + typeUri + " alias: " + alias + " count: " + count); } /** * Returns a list with the attribute value(s) associated for the specified * attribute alias. * * @param alias The attribute alias. * @return List of attribute values. */ public List getAttributeValues(String alias) { List values = new ArrayList(); if (! _parameters.hasParameter("count." + alias)) values.add(getParameterValue("value." + alias)); else for (int i = 1; i <= getCount(alias); i++) values.add(getParameterValue("value." + alias + "." + Integer.toString(i))); return values; } //todo: public String getAttributeValue(String alias) /** * Gets a list of attribute aliases. */ public List getAttributeAliases() { List aliases = new ArrayList(); Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (paramName.startsWith("value.")) { String alias; if (paramName.endsWith(".")) alias = paramName.substring(6, paramName.length() - 1); else alias = paramName.substring(6); if ( ! aliases.contains(alias) ) aliases.add(alias); } } return aliases; } /** * Gets a map with attribute aliases -> list of values. */ public Map getAttributes() { Map attributes = new HashMap(); Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (paramName.startsWith("value.")) { String alias; if (paramName.endsWith(".")) alias = paramName.substring(6, paramName.length() - 1); else alias = paramName.substring(6); if ( ! attributes.containsKey(alias) ) attributes.put(alias, getAttributeValues(alias)); } } return attributes; } /** * Gets the number of values provided in the fetch response for the * specified attribute alias. * * @param alias The attribute alias. */ public int getCount(String alias) { if (_parameters.hasParameter("count." + alias)) return Integer.parseInt(_parameters.getParameterValue("count." + alias)); else if (_parameters.hasParameter("value." + alias)) return 1; else return 0; } /** * Sets the number of values provided in the fetch response for the * specified attribute alias. * * @param alias The attribute alias. * @param count The number of values. */ private void setCount(String alias, int count) { // make sure that count.< alias >.1 is removed _parameters.removeParameters("count." + alias); if (count > 1) _parameters.set(new Parameter("count." + alias, Integer.toString(count))); } /** * Sets the optional 'update_url' parameter where the OP can later re-post * fetch-response updates for the values of the requested attributes. * * @param updateUrl The URL where the RP accepts later updates * for the requested attributes. 
*/ public void setUpdateUrl(String updateUrl) throws MessageException { try { new URL(updateUrl); } catch (MalformedURLException e) { throw new MessageException("Invalid update_url: " + updateUrl); } if (DEBUG) _log.debug("Setting fetch response update_url: " + updateUrl); _parameters.set(new Parameter("update_url", updateUrl)); } /** * Gets the optional 'update_url' parameter if available, or null otherwise. */ public String getUpdateUrl() { return _parameters.hasParameter("update_url") ? _parameters.getParameterValue("update_url") : null; } /** * Checks the validity of the extension. * <p> * Used when constructing a extension from a parameter list. * * @return True if the extension is valid, false otherwise. */ private boolean isValid() { Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (! paramName.equals("mode") && ! paramName.startsWith("type.") && ! paramName.startsWith("count.") && ! paramName.startsWith("value.") && ! paramName.equals("update_url")) { _log.warn("Invalid parameter name in fetch response: " + paramName); return false; } } return checkAttributes(); } private boolean checkAttributes() { List aliases = getAttributeAliases(); Iterator it = aliases.iterator(); while (it.hasNext()) { String alias = (String) it.next(); if (! _parameters.hasParameter("type." + alias)) { _log.warn("Type missing for attribute alias: " + alias); return false; } if ( ! _parameters.hasParameter("count." + alias) ) { if ( ! _parameters.hasParameter("value." + alias) ) { _log.warn("Value missing for attribute alias: " + alias); return false; } } else // count.alias present { if (_parameters.hasParameter("value." + alias)) { _log.warn("Count parameter present for alias: " + alias + "; should use " + alias + ".[index] format"); return false; } int count = getCount(alias); for (int i = 1; i <= count; i++) if (! _parameters.hasParameter("value." + alias + "." + Integer.toString(i))) { _log.warn("Value missing for alias: " + alias + "." + Integer.toString(i)); return false; } } } return true; } }
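To make the AX parameter layout produced by addAttribute() concrete, a short hedged trace (the alias, type URI and values are invented; the parameter names follow the switch in addAttribute() and setCount() above):

FetchResponse resp = FetchResponse.createFetchResponse();
resp.addAttribute("email", "http://axschema.org/contact/email", "a@example.com");
// parameters now: type.email=<type URI>, value.email=a@example.com (no count.email yet)
resp.addAttribute("email", "http://axschema.org/contact/email", "b@example.com");
// the single value is renamed to value.email.1, the new one is stored as
// value.email.2, and setCount() writes count.email=2
resp.getAttributeValues("email"); // -> ["a@example.com", "b@example.com"]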
src/org/openid4java/message/ax/FetchResponse.java
/* * Copyright 2006-2007 Sxip Identity Corporation */ package org.openid4java.message.ax; import org.openid4java.message.ParameterList; import org.openid4java.message.MessageException; import org.openid4java.message.Parameter; import java.net.URL; import java.net.MalformedURLException; import java.util.*; import org.apache.log4j.Logger; /** * Implements the extension for Attribute Exchange fetch responses. * * @author Marius Scurtescu, Johnny Bufu */ public class FetchResponse extends AxMessage { private static Logger _log = Logger.getLogger(FetchResponse.class); private static final boolean DEBUG = _log.isDebugEnabled(); /** * Constructs a Fetch Response with an empty parameter list. */ protected FetchResponse() { _parameters.set(new Parameter("mode", "fetch_response")); if (DEBUG) _log.debug("Created empty fetch response."); } /** * Constructs a Fetch Response with an empty parameter list. */ public static FetchResponse createFetchResponse() { return new FetchResponse(); } /** * Constructs a FetchResponse from a parameter list. * <p> * The parameter list can be extracted from a received message with the * getExtensionParams method of the Message class, and MUST NOT contain * the "openid.<extension_alias>." prefix. */ protected FetchResponse(ParameterList params) { _parameters = params; } public static FetchResponse createFetchResponse(ParameterList params) throws MessageException { FetchResponse resp = new FetchResponse(params); if (! resp.isValid()) throw new MessageException("Invalid parameters for a fetch response"); if (DEBUG) _log.debug("Created fetch response from parameter list:\n" + params); return resp; } /** * Creates a FetchResponse from a FetchRequest message and the data released * by the user. * * @param req FetchRequest message. * @param userData Map<attribute_alias, attribute_values> with the * data released by the user. * @return Properly formed FetchResponse. */ public static FetchResponse createFetchResponse(FetchRequest req, Map userData) { FetchResponse resp = new FetchResponse(); Map attributes = req.getAttributes(); Iterator iter = attributes.keySet().iterator(); while (iter.hasNext()) { String alias = (String) iter.next(); String value = userData.get(alias) != null ? (String) userData.get(alias) : ""; resp.addAttribute(alias, (String) attributes.get(alias), value); } return resp; } /** * Adds an attribute to the fetch response. * * @param alias The alias identifier that will be associated * with the attribute type URI. * @param typeUri The attribute type URI. * @param value The value of the attribute. */ public void addAttribute(String alias, String typeUri, String value) { int count = getCount(alias); String index = ""; switch(count) { case 0: _parameters.set(new Parameter("type." + alias, typeUri)); break; case 1: // rename the existing one _parameters.set(new Parameter("value." + alias + ".1", getParameterValue("value." + alias))); _parameters.removeParameters("value." + alias); index = ".2"; break; default: index = "." +Integer.toString(count + 1); } _parameters.set(new Parameter("value." + alias + index, value)); setCount(alias, ++count); if (DEBUG) _log.debug("Added new attribute to fetch response; type: " + typeUri + " alias: " + alias + " count: " + count); } /** * Returns a list with the attribute value(s) associated for the specified * attribute alias. * * @param alias The attribute alias. * @return List of attribute values. */ public List getAttributeValues(String alias) { List values = new ArrayList(); if (! _parameters.hasParameter("count." 
+ alias)) values.add(getParameterValue("value." + alias)); else for (int i = 1; i <= getCount(alias); i++) values.add(getParameterValue("value." + alias + "." + Integer.toString(i))); return values; } //todo: public String getAttributeValue(String alias) /** * Gets a list of attribute aliases. */ public List getAttributeAliases() { List aliases = new ArrayList(); Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (paramName.startsWith("value.")) { String alias; if (paramName.endsWith(".")) alias = paramName.substring(6, paramName.length() - 1); else alias = paramName.substring(6); if ( ! aliases.contains(alias) ) aliases.add(alias); } } return aliases; } /** * Gets a map with attribute aliases -> list of values. */ public Map getAttributes() { Map attributes = new HashMap(); Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (paramName.startsWith("value.")) { String alias; if (paramName.endsWith(".")) alias = paramName.substring(6, paramName.length() - 1); else alias = paramName.substring(6); if ( ! attributes.containsKey(alias) ) attributes.put(alias, getAttributeValues(alias)); } } return attributes; } /** * Gets the number of values provided in the fetch response for the * specified attribute alias. * * @param alias The attribute alias. */ public int getCount(String alias) { if (_parameters.hasParameter("count." + alias)) return Integer.parseInt(_parameters.getParameterValue("count." + alias)); else if (_parameters.hasParameter("value." + alias)) return 1; else return 0; } /** * Sets the number of values provided in the fetch response for the * specified attribute alias. * * @param alias The attribute alias. * @param count The number of values. */ private void setCount(String alias, int count) { // make sure that count.< alias >.1 is removed _parameters.removeParameters("count." + alias); if (count > 1) _parameters.set(new Parameter("count." + alias, Integer.toString(count))); } /** * Sets the optional 'update_url' parameter where the OP can later re-post * fetch-response updates for the values of the requested attributes. * * @param updateUrl The URL where the RP accepts later updates * for the requested attributes. */ public void setUpdateUrl(String updateUrl) throws MessageException { try { new URL(updateUrl); } catch (MalformedURLException e) { throw new MessageException("Invalid update_url: " + updateUrl); } if (DEBUG) _log.debug("Setting fetch response update_url: " + updateUrl); _parameters.set(new Parameter("update_url", updateUrl)); } /** * Gets the optional 'update_url' parameter if available, or null otherwise. */ public String getUpdateUrl() { return _parameters.hasParameter("update_url") ? _parameters.getParameterValue("update_url") : null; } /** * Checks the validity of the extension. * <p> * Used when constructing a extension from a parameter list. * * @return True if the extension is valid, false otherwise. */ private boolean isValid() { Iterator it = _parameters.getParameters().iterator(); while (it.hasNext()) { String paramName = ((Parameter) it.next()).getKey(); if (! paramName.equals("mode") && ! paramName.startsWith("type.") && ! paramName.startsWith("count.") && ! paramName.startsWith("value.") && ! 
paramName.equals("update_url")) { _log.warn("Invalid parameter name in fetch response: " + paramName); return false; } } return checkAttributes(); } private boolean checkAttributes() { List aliases = getAttributeAliases(); Iterator it = aliases.iterator(); while (it.hasNext()) { String alias = (String) it.next(); if (! _parameters.hasParameter("type." + alias)) { _log.warn("Type missing for attribute alias: " + alias); return false; } if ( ! _parameters.hasParameter("count." + alias) ) { if ( ! _parameters.hasParameter("value." + alias) ) { _log.warn("Value missing for attribute alias: " + alias); return false; } } else // count.alias present { if (_parameters.hasParameter("value." + alias)) { _log.warn("Count parameter present for alias: " + alias + "; should use " + alias + ".[index] format"); return false; } int count = getCount(alias); for (int i = 1; i <= count; i++) if (! _parameters.hasParameter("value." + alias + "." + Integer.toString(i))) { _log.warn("Value missing for alias: " + alias + "." + Integer.toString(i)); return false; } } } return true; } }
The userData may be a Map<String alias, String value> or a Map<String alias, List<String> values>. The attribute values are provided by the calling application. If a list of values is specified per attribute, at most n will be sent, where n is the number of attribute values requested in the FetchRequest. If requested attributes are not found in the userData map, they are not included in the fetch response (rather than being included as blank strings).
src/org/openid4java/message/ax/FetchResponse.java
The userData may be a Map<String alias, String value> or a Map<String alias, List<String> values>. The attribute values are provided by the calling application. If a list of values is specified per attribute, at most n will be sent, where n is the number of attribute values requested in the FetchRequest. If requested attributes are not found in the userData map, they are not included in the fetch response (rather than being included as blank strings).
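A matching sketch of the two userData shapes the new factory accepts (raw java.util collections, in keeping with the class's own style; req and all values are invented, and req is assumed to have requested at least two email values):

Map userData = new HashMap();
userData.put("fullname", "Jane Doe");                    // single String value
userData.put("email", Arrays.asList("a@example.com",
                                    "b@example.com"));   // List of values
// no entry for "phone": with the new code it is simply left out of the
// response instead of being sent as a blank string
FetchResponse resp = FetchResponse.createFetchResponse(req, userData);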
Java
apache-2.0
64e3f1a084a8ef83b49338d378c0c64a22d0aab4
0
anylineorg/anyline,anylineorg/anyline
package org.anyline.amap.util; import java.util.ArrayList; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import org.anyline.entity.DataRow; import org.anyline.entity.DataSet; import org.anyline.entity.MapLocation; import org.anyline.entity.PageNavi; import org.anyline.jdbc.config.db.impl.PageNaviImpl; import org.anyline.net.HttpUtil; import org.anyline.util.BasicUtil; import org.anyline.util.BeanUtil; import org.anyline.util.ConfigTable; import org.anyline.util.MD5Util; import org.anyline.util.NumberUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * 高德云图 * @author zh * */ public class AmapUtil { private static final Logger log = LoggerFactory.getLogger(AmapUtil.class); private String key = AmapConfig.KEY; private String privateKey = AmapConfig.PRIVATE_KEY; private String table = AmapConfig.TABLE_ID; private static Map<String,AmapUtil> pool = new Hashtable<String,AmapUtil>(); static{ AmapUtil def = new AmapUtil(); pool.put(def.table, def); } public static AmapUtil getInstance(String key, String privateKey, String table){ AmapUtil util = new AmapUtil(); util.key = key; util.privateKey = privateKey; util.table = table; return util; } public static AmapUtil getInstance(){ return getInstance(AmapConfig.TABLE_ID); } public static AmapUtil getInstance(String table){ AmapUtil util = pool.get(table); if(null ==util){ util = new AmapUtil(); util.table = table; pool.put(table, util); } return util; } public static AmapUtil defaultInstance(){ return pool.get(AmapConfig.TABLE_ID); } /** * 添加记录 * @param name name * @param loctype 1:经纬度 2:地址 * @param lon lon * @param lat lat * @param address address * @param extras extras * @return String */ public String create(String name, int loctype, String lon, String lat, String address, Map<String, Object> extras){ String url = "http://yuntuapi.amap.com/datamanage/data/create"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("loctype", loctype+""); Map<String,Object> data = new HashMap<String, Object>(); if(null != extras){ Iterator<String> keys = extras.keySet().iterator(); while(keys.hasNext()){ String key = keys.next(); Object value = extras.get(key); if(BasicUtil.isNotEmpty(value)){ data.put(key, value); } } } data.put("_name", name); if(BasicUtil.isNotEmpty(lon) && BasicUtil.isNotEmpty(lat)){ data.put("_location", lon+","+lat); } if(BasicUtil.isNotEmpty(address)){ data.put("_address", address); } params.put("data", BeanUtil.map2json(data)); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); String id = null; try{ DataRow row = DataRow.parseJson(txt); if(row.containsKey("status")){ String status = row.getString("status"); if("1".equals(status) && row.containsKey("_id")){ id = row.getString("_id"); log.warn("[添加标注完成][id:{}][name:{}]",id,name); }else{ log.warn("[添加标注失败][name:{}][info:{}]", name, row.getString("info")); log.warn("[param:{}]",BeanUtil.map2string(params)); } } }catch(Exception e){ e.printStackTrace(); } return id; } public String create(String name, String lon, String lat, String address, Map<String,Object> extras){ return create(name, 1, lon, lat, address, extras); } public String create(String name, String lon, String lat, Map<String,Object> extras){ return create(name, 1, lon, lat, null, extras); } public String create(String name, int loctype, String lon, String lat, String address){ return create(name, 
loctype, lon, lat, address, null); } public String create(String name, String lon, String lat, String address){ return create(name, lon, lat, address, null); } public String create(String name, String lon, String lat){ return create(name, lon, lat, null, null); } public String create(String name, String address){ return create(name, null, null, address); } /** * 删除标注 * @param ids ids * @return int */ public int delete(String ... ids){ if(null == ids){ return 0; } List<String> list = new ArrayList<>(); for(String id:ids){ list.add(id); } return delete(list); } public int delete(List<String> ids){ int cnt = 0; if(null == ids || ids.size() ==0){ return cnt; } String param = ""; int size = ids.size(); //一次删除最多50条 大于50打后拆分数据 if(size > 50){ int navi = (size-1)/50 + 1; for(int i=0; i<navi; i++){ int fr = i*50; int to = i*50 + 49; if(to > size-1){ to = size - 1; } List<String> clds = ids.subList(fr, to); cnt += delete(clds); } return cnt; } for(int i=0; i<size; i++){ if(i==0){ param += ids.get(i); }else{ param += "," + ids.get(i); } } Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("ids", param); params.put("sig", sign(params)); String url = "http://yuntuapi.amap.com/datamanage/data/delete"; String txt = HttpUtil.post(url, "UTF-8", params).getText(); if(ConfigTable.isDebug() && log.isWarnEnabled()){ log.warn("[删除标注][param:{}]",BeanUtil.map2string(params)); } try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("status")){ String status = json.getString("status"); if("1".equals(status)){ cnt = json.getInt("success"); log.warn("[删除标注完成][success:{}][fail:{}]", cnt,json.getInt("fail")); }else{ log.warn("[删除标注失败][info:{}]",json.getString("info")); } } }catch(Exception e){ e.printStackTrace(); cnt = -1; } return cnt; } /** * 更新地图 * @param id id * @param name name * @param loctype loctype * @param lon lon * @param lat lat * @param address address * @param extras extras * @return int 0:更新失败,没有对应的id 1:更新完成 -1:异常 */ public int update(String id, String name, int loctype, String lon, String lat, String address, Map<String,Object> extras){ int cnt = 0; String url = "http://yuntuapi.amap.com/datamanage/data/update"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("loctype", loctype+""); Map<String,Object> data = new HashMap<String, Object>(); if(null != extras){ Iterator<String> keys = extras.keySet().iterator(); while(keys.hasNext()){ String key = keys.next(); Object value = extras.get(key); data.put(key, value); } } data.put("_id", id); data.put("_name", name); if(BasicUtil.isNotEmpty(lon) && BasicUtil.isNotEmpty(lat)){ data.put("_location", lon+","+lat); } if(BasicUtil.isNotEmpty(address)){ data.put("_address", address); } params.put("data", BeanUtil.map2json(data)); params.put("sig", sign(params)); String txt = HttpUtil.post(url, "UTF-8", params).getText(); if(ConfigTable.isDebug() && log.isWarnEnabled()){ log.warn("[更新标注][param:{}]",BeanUtil.map2string(params)); } try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("status")){ String status = json.getString("status"); if("1".equals(status)){ cnt = 1; log.warn("[更新标注完成][id:{}][name:{}]",id,name); }else{ log.warn("[更新标注失败][name:{}][info:{}]",name,json.getString("info")); cnt = 0; } } }catch(Exception e){ e.printStackTrace(); cnt = -1; } return cnt; } public int update(String id, String name, String lon, String lat, String address, Map<String,Object> extras){ return 
update(id, name, 1, lon, lat, address, extras); } public int update(String id, String name, String lon, String lat, Map<String,Object> extras){ return update(id, name, 1, lon, lat, null, extras); } public int update(String id, String name, int loctype, String lon, String lat, String address){ return update(id, name, loctype, lon, lat, address, null); } public int update(String id, String name, String lon, String lat, String address){ return update(id, name, lon, lat, address, null); } public int update(String id, String name, String lon, String lat){ return update(id, name, lon, lat, null, null); } public int update(String id, String name, String address){ return update(id, name, null, null, address); } public int update(String id, String name){ return update(id, name, null); } /** * 创建新地图 * @param name name * @return String */ public String createTable(String name){ String tableId = null; String url = "http://yuntuapi.amap.com/datamanage/table/create"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("name", name); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); DataRow json = DataRow.parseJson(txt); if(json.containsKey("tableid")){ tableId = json.getString("tableid"); log.warn("[创建地图完成][tableid:{}]",tableId); }else{ log.warn("[创建地图失败][info:{}][param:{}]",txt,BeanUtil.map2string(params)); } return tableId; } /** * 本地检索 检索指定云图tableid里,对应城市(全国/省/市/区县)范围的POI信息 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t1 * @param keywords keywords * @param city city * @param filter filter * @param sortrule sortrule * @param limit limit * @param page page * @return DataSet */ public DataSet local(String keywords, String city, String filter, String sortrule, int limit, int page){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/local"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("keywords", keywords); if(BasicUtil.isEmpty(city)){ city = "全国"; } params.put("city", city); params.put("filter", filter); params.put("sortrule", sortrule); limit = NumberUtil.min(limit, 100); params.put("limit", limit+""); page = NumberUtil.max(page, 1); params.put("page", page+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); PageNavi navi = new PageNaviImpl(); navi.setCurPage(page); navi.setPageRows(limit); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("count")){ navi.setTotalRow(json.getInt("count")); } if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[本地搜索失败][info:{}]",json.getString("info")); log.warn("[本地搜索失败][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[本地搜索失败][info:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } set.setNavi(navi); log.warn("[本地搜索][size:{}]",navi.getTotalRow()); return set; } /** * 周边搜索 在指定tableid的数据表内,搜索指定中心点和半径范围内,符合筛选条件的位置数据 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t2 * @param center center * @param radius 查询半径 * @param keywords 关键词 * @param filters 过滤条件 * @param sortrule 排序 * @param limit 每页多少条 * @param page 第几页 * @return DataSet */ public DataSet around(String center, int radius, String keywords, Map<String,String> filters, String sortrule, int limit, int page){ DataSet set = null; String url = 
"http://yuntuapi.amap.com/datasearch/around"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("center", center); params.put("radius", radius+""); if(BasicUtil.isNotEmpty(keywords)){ params.put("keywords", keywords); } //过滤条件 if(null != filters && !filters.isEmpty()){ String filter = ""; Iterator<String> keys = filters.keySet().iterator(); while(keys.hasNext()){ String key = keys.next(); String value = filters.get(key); if(BasicUtil.isEmpty(value)){ continue; } if("".equals(filter)){ filter = key + ":" + value; }else{ filter = filter + "+" + key + ":" + value; } } if(!"".equals(filter)){ params.put("filter", filter); } } if(BasicUtil.isNotEmpty(sortrule)){ params.put("sortrule", sortrule); } limit = NumberUtil.min(limit, 100); params.put("limit", limit+""); page = NumberUtil.max(page, 1); params.put("page", page+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); PageNavi navi = new PageNaviImpl(); navi.setCurPage(page); navi.setPageRows(limit); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("count")){ navi.setTotalRow(json.getInt("count")); } if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ log.warn("[周边搜索失败][info:{}]",json.getString("info")); log.warn("[周边搜索失败][params:{}]",BeanUtil.map2string(params)); set = new DataSet(); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[周边搜索失败][error:{}]",e.getMessage()); e.printStackTrace(); set = new DataSet(); set.setException(e); } set.setNavi(navi); log.warn("[周边搜索][size:{}]",navi.getTotalRow()); return set; } public DataSet around(String center, int radius, Map<String,String> filters, String sortrule, int limit, int page){ return around(center, radius, null, filters, sortrule, limit, page); } public DataSet around(String center, int radius, Map<String,String> filters, int limit, int page){ return around(center, radius, null, filters, null, limit, page); } public DataSet around(String center, int radius, Map<String,String> filters, int limit){ return around(center, radius, null, filters, null, limit, 1); } public DataSet around(String center, int radius, String keywords, String sortrule, int limit, int page){ Map<String,String> filter = new HashMap<String,String>(); return around(center, radius, keywords, filter, sortrule, limit, page); } public DataSet around(String center, int radius, String keywords, int limit, int page){ return around(center, radius, keywords, "", limit, page); } public DataSet around(String center, int radius, int limit, int page){ return around(center, radius, "", limit, page); } public DataSet around(String center, int radius, int limit){ return around(center, radius, "", limit, 1); } public DataSet around(String center, int radius){ return around(center, radius, "", 100, 1); } public DataSet around(String center){ return around(center, ConfigTable.getInt("AMAP_MAX_RADIUS")); } /** * 按条件检索数据(可遍历整表数据) 根据筛选条件检索指定tableid数据表中的数据 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t5 * AmapUtil.getInstance(TABLE_TENANT).list("tenant_id:1","shop_id:1", 10, 1); * @param filter 查询条件 * filter=key1:value1+key2:[value2,value3] * filter=type:酒店+star:[3,5] 等同于SQL语句的: WHERE type = "酒店" AND star BETWEEN 3 AND 5 * @param sortrule 排序条件 * 支持按用户自选的字段(仅支持数值类型字段)升降序排序.1:升序,0:降序 * 若不填升降序,默认按升序排列. 
示例:按年龄age字段升序排序 sortrule = age:1 * @param limit 每页最大记录数为100 * @param page 当前页数 &gt;=1 * @return DataSet */ public DataSet list(String filter, String sortrule, int limit, int page){ DataSet set = null; String url = "http://yuntuapi.amap.com/datamanage/data/list"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); if(BasicUtil.isNotEmpty(sortrule)){ params.put("sortrule", sortrule); } limit = NumberUtil.min(limit, 100); params.put("limit", limit+""); page = NumberUtil.max(page, 1); params.put("page", page+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); PageNavi navi = new PageNaviImpl(); navi.setCurPage(page); navi.setPageRows(limit); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("count")){ navi.setTotalRow(json.getInt("count")); } if(json.containsKey("datas")){ set = json.getSet("datas"); if(ConfigTable.isDebug() && log.isWarnEnabled()){ log.warn("[条件搜索][结果数量:{}]",set.size()); } }else{ set = new DataSet(); log.warn("[条件搜索失败][info:{}]",json.getString("info")); log.warn("[条件搜索失败][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[条件搜索失败][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } set.setNavi(navi); log.warn("[条件搜索][size:{}]",navi.getTotalRow()); return set; } /** * ID检索 在指定tableid的数据表内,查询对应数据id的数据详情 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t4 * API:在指定tableid的数据表内,查询对应数据id的数据详情 * @param id id * @return DataRow */ public DataRow info(String id){ DataRow row = null; String url = "http://yuntuapi.amap.com/datasearch/id"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("_id", id); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ DataSet set = json.getSet("datas"); if(set.size() > 0){ row = set.getRow(0); } }else{ log.warn("[周边搜索失败][info:{}]",json.getString("info")); log.warn("[周边搜索失败][params:{}]",BeanUtil.map2string(params)); } }catch(Exception e){ log.warn("[周边搜索失败][error:{}]",e.getMessage()); e.printStackTrace(); } return row; } /** * 省数据分布检索 检索指定云图tableid里,全表数据或按照一定查询或筛选过滤而返回的数据中,含有数据的省名称(中文名称)和对应POI个数(count)的信息列表,按照count从高到低的排序展现 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t6 * @param keywords 关键字 必须 * @param country ""或null时 默认:中国 * @param filter 条件 * @return DataSet */ public DataSet statByProvince(String keywords, String country, String filter){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/province"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); params.put("keywords", keywords); country = BasicUtil.evl(country, "中国")+""; params.put("country", country); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[数据分布检索失败][info:{}]",json.getString("info")); log.warn("[数据分布检索失败][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ 
log.warn("[数据分布检索失败][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * 市数据分布检索 检索指定云图tableid里,全表数据或按照一定查询或筛选过滤而返回的数据中,含有数据的市名称(中文名称)和对应POI个数(count)的信息列表,按照count从高到低的排序展现 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t6 * @param keywords 关键字 必须 * @param province ""或null时 默认:全国 * @param filter 条件 * @return DataSet */ public DataSet statByCity(String keywords, String province, String filter){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/city"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); params.put("keywords", keywords); province = BasicUtil.evl(province, "全国")+""; params.put("country", province); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[数据分布检索失败][info:{}]",json.getString("info")); log.warn("[数据分布检索失败][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[数据分布检索失败][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * 区数据分布检索 检索指定云图tableid里,在指定的省,市下面全表数据或按照一定查询或筛选过滤而返回的数据中,所有区县名称(中文名称)和对应POI个数(count)的信息列表,按照count从高到低的排序展现 * API:http://lbs.amap.com/yuntu/reference/cloudsearch/#t6 * @param keywords 关键字 必须 * @param province province * @param city city * @param filter 条件 * @return DataSet */ public DataSet statByDistrict(String keywords, String province, String city, String filter){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/province"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); params.put("keywords", keywords); params.put("province", province); params.put("city", city); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[数据分布检索失败][info:{}]",json.getString("info")); log.warn("[数据分布检索失败][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[数据分布检索失败][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * 检索1个中心点,周边一定公里范围内(直线距离或者导航距离最大10公里),一定时间范围内(最大24小时)上传过用户位置信息的用户,返回用户标识,经纬度,距离中心点距离. 
* @param center center * @param radius radius * @param limit limit * @param timerange timerange * @return DataSet */ public DataSet nearby(String center, String radius, int limit, int timerange ){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/province"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("center", center); params.put("radius", radius); params.put("searchtype", "0"); params.put("limit", NumberUtil.min(limit, 100)+""); params.put("timerange", BasicUtil.evl(timerange,"1800")+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[附近检索失败][info:}{}]",json.getString("info")); log.warn("[附近检索失败][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[附近检索失败][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * 按坐标查地址 * @param location 经度在前,纬度在后,经纬度间以“,”分割 * @return DataRow */ public DataRow regeo(String location){ DataRow row = null; String url = "http://restapi.amap.com/v3/geocode/regeo"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("location", location); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); try{ row = DataRow.parseJson(txt); if(null != row){ row = row.getRow("regeocode"); if(null != row){ DataRow addressComponent = row.getRow("addressComponent"); if(null != addressComponent){ addressComponent.put("address", row.getString("formatted_address")); row = addressComponent; }else{ row.put("address", row.getString("formatted_address")); } } } }catch(Exception e){ e.printStackTrace(); } return row; } public DataRow regeo(String lon, String lat){ return regeo(lon+","+lat); } /** * 根据地址查坐标 * @param address address * @param city city * @return MapLocation */ public MapLocation geo(String address, String city){ MapLocation location = null; String url = "http://restapi.amap.com/v3/geocode/geo"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("address", address); if(BasicUtil.isNotEmpty(city)){ params.put("city", city); } String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); DataSet set = null; if(json.containsKey("geocodes")){ set = json.getSet("geocodes"); if(set.size()>0){ DataRow row = set.getRow(0); location = new MapLocation(row.getString("LOCATION")); location.setCode(row.getString("ADCODE")); location.setProvinceCode(BasicUtil.cut(row.getString("ADCODE"),0,4)); location.setProvinceNm(row.getString("PROVINCE")); location.setCityCode(row.getString("CITYCODE")); location.setCityNm(row.getString("CITY")); location.setCountyCode(row.getString("ADCODE")); location.setCountyNm(row.getString("DISTRICT")); location.setStreet(row.getString("STREET")); location.setAddress(row.getString("FORMATTED_ADDRESS")); location.setLevel(row.getString("LEVEL")); } }else{ log.warn("[坐标查询失败][info:{}][params:{}]",json.getString("info"),BeanUtil.map2string(params)); } }catch(Exception e){ log.warn("[坐标查询失败][error:{}]",e.getMessage()); } return location; } public MapLocation geo(String address){ return geo(address, null); } /** * 驾车路线规划 * 
http://lbs.amap.com/api/webservice/guide/api/direction#driving * @param origin 出发地 origin 出发地 * @param destination 目的地 destination 目的地 * @param points 途经地 最多支持16个 坐标点之间用";"分隔 * @param strategy 选路策略 0,不考虑当时路况,返回耗时最短的路线,但是此路线不一定距离最短 * 1,不走收费路段,且耗时最少的路线 * 2,不考虑路况,仅走距离最短的路线,但是可能存在穿越小路/小区的情况 * @return DataRow */ @SuppressWarnings({ "rawtypes", "unchecked" }) public DataRow directionDrive(String origin, String destination, String points, int strategy){ DataRow row = null; String url = "http://restapi.amap.com/v3/direction/driving"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("origin", origin); params.put("destination", destination); params.put("strategy", strategy+""); if(BasicUtil.isNotEmpty(points)){ params.put("points", points); } String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); try{ row = DataRow.parseJson(txt); DataRow route = row.getRow("route"); if(null != route){ List paths = route.getList("PATHS"); if(paths.size()>0){ DataRow path = (DataRow)paths.get(0); row = path; List<DataRow> steps = (List<DataRow>)path.getList("steps"); List<String> polylines = new ArrayList<>(); for(DataRow step:steps){ String polyline = step.getString("polyline"); String[] tmps = polyline.split(";"); for(String tmp:tmps){ polylines.add(tmp); } } row.put("polylines", polylines); } } }catch(Exception e){ log.warn("[线路规划失败][error:{}]",e.getMessage()); } return row; } public DataRow directionDrive(String origin, String destination){ return directionDrive(origin, destination, null, 0); } public DataRow directionDrive(String origin, String destination, String points){ return directionDrive(origin, destination, points, 0); } public DataSet poi(String city, String keywords){ DataSet set = new DataSet(); String url = "https://restapi.amap.com/v5/place/text"; Map<String,Object> params = new HashMap<String,Object>(); params.put("city", city); params.put("keywords", keywords); params.put("page","1"); params.put("offset","20"); DataRow row = api(url,params); if(row.getInt("status",0)==1){ List<DataRow> items = (List<DataRow>)row.get("POIS"); for(DataRow item:items){ set.add(item); } } return set; } public DataRow api(String url, Map<String,Object> params){ params.put("key", this.key); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); DataRow row = null; try { row = DataRow.parseJson(txt); }catch (Exception e){ row = new DataRow(); row.put("status",0); row.put("info", e.getMessage()); e.printStackTrace(); } return row; } /** * 签名 * @param params params * @return String */ public String sign(Map<String,Object> params){ String sign = ""; sign = BeanUtil.map2string(params) + this.privateKey; sign = MD5Util.sign(sign,"UTF-8"); return sign; } }
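Two points from the class above worth making concrete: every request is signed by MD5-hashing the serialized parameter map with the private key appended (see sign()), and the convenience overloads all funnel into the full around()/list() variants. A hedged usage sketch with placeholder credentials and coordinates:

AmapUtil util = AmapUtil.getInstance("my-key", "my-private-key", "my-table-id");

// POIs within 3km of the center ("lon,lat"), 20 per page, first page
DataSet shops = util.around("116.397428,39.90923", 3000, "coffee", 20, 1);

// the signature every call attaches as the "sig" parameter:
Map<String, Object> params = new HashMap<String, Object>();
params.put("key", "my-key");
params.put("tableid", "my-table-id");
String sig = util.sign(params); // MD5Util.sign(BeanUtil.map2string(params) + privateKey, "UTF-8")
params.put("sig", sig);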
anyline-amap/src/main/java/org/anyline/amap/util/AmapUtil.java
package org.anyline.amap.util; import java.util.ArrayList; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import org.anyline.entity.DataRow; import org.anyline.entity.DataSet; import org.anyline.entity.MapLocation; import org.anyline.entity.PageNavi; import org.anyline.jdbc.config.db.impl.PageNaviImpl; import org.anyline.net.HttpUtil; import org.anyline.util.BasicUtil; import org.anyline.util.BeanUtil; import org.anyline.util.ConfigTable; import org.anyline.util.MD5Util; import org.anyline.util.NumberUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * 高德云图 * @author zh * */ public class AmapUtil { private static final Logger log = LoggerFactory.getLogger(AmapUtil.class); private String key = AmapConfig.KEY; private String privateKey = AmapConfig.PRIVATE_KEY; private String table = AmapConfig.TABLE_ID; private static Map<String,AmapUtil> pool = new Hashtable<String,AmapUtil>(); static{ AmapUtil def = new AmapUtil(); pool.put(def.table, def); } public static AmapUtil getInstance(String key, String privateKey, String table){ AmapUtil util = new AmapUtil(); util.key = key; util.privateKey = privateKey; util.table = table; return util; } public static AmapUtil getInstance(){ return getInstance(AmapConfig.TABLE_ID); } public static AmapUtil getInstance(String table){ AmapUtil util = pool.get(table); if(null ==util){ util = new AmapUtil(); util.table = table; pool.put(table, util); } return util; } public static AmapUtil defaultInstance(){ return pool.get(AmapConfig.TABLE_ID); } /** * 添加记录 * @param name name * @param loctype 1:经纬度 2:地址 * @param lon lon * @param lat lat * @param address address * @param extras extras * @return String */ public String create(String name, int loctype, String lon, String lat, String address, Map<String, Object> extras){ String url = "http://yuntuapi.amap.com/datamanage/data/create"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("loctype", loctype+""); Map<String,Object> data = new HashMap<String, Object>(); if(null != extras){ Iterator<String> keys = extras.keySet().iterator(); while(keys.hasNext()){ String key = keys.next(); Object value = extras.get(key); if(BasicUtil.isNotEmpty(value)){ data.put(key, value); } } } data.put("_name", name); if(BasicUtil.isNotEmpty(lon) && BasicUtil.isNotEmpty(lat)){ data.put("_location", lon+","+lat); } if(BasicUtil.isNotEmpty(address)){ data.put("_address", address); } params.put("data", BeanUtil.map2json(data)); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); String id = null; try{ DataRow row = DataRow.parseJson(txt); if(row.containsKey("status")){ String status = row.getString("status"); if("1".equals(status) && row.containsKey("_id")){ id = row.getString("_id"); log.warn("[添加标注完成][id:{}][name:{}]",id,name); }else{ log.warn("[添加标注失败][name:{}][info:{}]", name, row.getString("info")); log.warn("[param:{}]",BeanUtil.map2string(params)); } } }catch(Exception e){ e.printStackTrace(); } return id; } public String create(String name, String lon, String lat, String address, Map<String,Object> extras){ return create(name, 1, lon, lat, address, extras); } public String create(String name, String lon, String lat, Map<String,Object> extras){ return create(name, 1, lon, lat, null, extras); } public String create(String name, int loctype, String lon, String lat, String address){ return create(name, 
loctype, lon, lat, address, null); } public String create(String name, String lon, String lat, String address){ return create(name, lon, lat, address, null); } public String create(String name, String lon, String lat){ return create(name, lon, lat, null, null); } public String create(String name, String address){ return create(name, null, null, address); } /** * Delete markers * @param ids ids * @return int */ public int delete(String ... ids){ if(null == ids){ return 0; } List<String> list = new ArrayList<>(); for(String id:ids){ list.add(id); } return delete(list); } public int delete(List<String> ids){ int cnt = 0; if(null == ids || ids.size() == 0){ return cnt; } String param = ""; int size = ids.size(); // The API deletes at most 50 ids per request; larger lists are split into batches of 50. if(size > 50){ int navi = (size-1)/50 + 1; for(int i=0; i<navi; i++){ int fr = i*50; int to = fr + 50; // subList's end index is exclusive, so the batch is [fr, fr+50) capped at size if(to > size){ to = size; } List<String> clds = ids.subList(fr, to); cnt += delete(clds); } return cnt; } for(int i=0; i<size; i++){ if(i==0){ param += ids.get(i); }else{ param += "," + ids.get(i); } } Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("ids", param); params.put("sig", sign(params)); String url = "http://yuntuapi.amap.com/datamanage/data/delete"; String txt = HttpUtil.post(url, "UTF-8", params).getText(); if(ConfigTable.isDebug() && log.isWarnEnabled()){ log.warn("[delete markers][param:{}]",BeanUtil.map2string(params)); } try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("status")){ String status = json.getString("status"); if("1".equals(status)){ cnt = json.getInt("success"); log.warn("[delete markers done][success:{}][fail:{}]", cnt,json.getInt("fail")); }else{ log.warn("[delete markers failed][info:{}]",json.getString("info")); } } }catch(Exception e){ e.printStackTrace(); cnt = -1; } return cnt; } /** * Update a record (marker) * @param id id * @param name name * @param loctype loctype * @param lon lon * @param lat lat * @param address address * @param extras extras * @return int 0: update failed (no record with the given id) 1: updated -1: exception */ public int update(String id, String name, int loctype, String lon, String lat, String address, Map<String,Object> extras){ int cnt = 0; String url = "http://yuntuapi.amap.com/datamanage/data/update"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("loctype", loctype+""); Map<String,Object> data = new HashMap<String, Object>(); if(null != extras){ Iterator<String> keys = extras.keySet().iterator(); while(keys.hasNext()){ String key = keys.next(); Object value = extras.get(key); data.put(key, value); } } data.put("_id", id); data.put("_name", name); if(BasicUtil.isNotEmpty(lon) && BasicUtil.isNotEmpty(lat)){ data.put("_location", lon+","+lat); } if(BasicUtil.isNotEmpty(address)){ data.put("_address", address); } params.put("data", BeanUtil.map2json(data)); params.put("sig", sign(params)); String txt = HttpUtil.post(url, "UTF-8", params).getText(); if(ConfigTable.isDebug() && log.isWarnEnabled()){ log.warn("[update marker][param:{}]",BeanUtil.map2string(params)); } try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("status")){ String status = json.getString("status"); if("1".equals(status)){ cnt = 1; log.warn("[update marker done][id:{}][name:{}]",id,name); }else{ log.warn("[update marker failed][name:{}][info:{}]",name,json.getString("info")); cnt = 0; } } }catch(Exception e){ e.printStackTrace(); cnt = -1; } return cnt; } public int update(String id, String name, String lon, String lat, String address, Map<String,Object> extras){ return
update(id, name, 1, lon, lat, address, extras); } public int update(String id, String name, String lon, String lat, Map<String,Object> extras){ return update(id, name, 1, lon, lat, null, extras); } public int update(String id, String name, int loctype, String lon, String lat, String address){ return update(id, name, loctype, lon, lat, address, null); } public int update(String id, String name, String lon, String lat, String address){ return update(id, name, lon, lat, address, null); } public int update(String id, String name, String lon, String lat){ return update(id, name, lon, lat, null, null); } public int update(String id, String name, String address){ return update(id, name, null, null, address); } public int update(String id, String name){ return update(id, name, null); } /** * Create a new table (map) * @param name name * @return String */ public String createTable(String name){ String tableId = null; String url = "http://yuntuapi.amap.com/datamanage/table/create"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("name", name); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); DataRow json = DataRow.parseJson(txt); if(json.containsKey("tableid")){ tableId = json.getString("tableid"); log.warn("[create table done][tableid:{}]",tableId); }else{ log.warn("[create table failed][info:{}][param:{}]",txt,BeanUtil.map2string(params)); } return tableId; } /** * Local search: retrieves POIs from the given cloud table (tableid) within a city scope (nationwide/province/city/district) * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t1 * @param keywords keywords * @param city city * @param filter filter * @param sortrule sortrule * @param limit limit * @param page page * @return DataSet */ public DataSet local(String keywords, String city, String filter, String sortrule, int limit, int page){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/local"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("keywords", keywords); if(BasicUtil.isEmpty(city)){ city = "全国"; // the API expects the Chinese value "全国" (nationwide) } params.put("city", city); params.put("filter", filter); params.put("sortrule", sortrule); limit = NumberUtil.min(limit, 100); params.put("limit", limit+""); page = NumberUtil.max(page, 1); params.put("page", page+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); PageNavi navi = new PageNaviImpl(); navi.setCurPage(page); navi.setPageRows(limit); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("count")){ navi.setTotalRow(json.getInt("count")); } if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[local search failed][info:{}]",json.getString("info")); log.warn("[local search failed][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[local search failed][info:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } set.setNavi(navi); log.warn("[local search][size:{}]",navi.getTotalRow()); return set; } /** * Around search: within the given table (tableid), finds locations matching the filter conditions inside a radius around a center point * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t2 * @param center center * @param radius search radius * @param keywords keywords * @param filters filter conditions * @param sortrule sort rule * @param limit rows per page * @param page page number * @return DataSet */ public DataSet around(String center, int radius, String keywords, Map<String,String> filters, String sortrule, int limit, int page){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/around"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("center", center); params.put("radius", radius+""); if(BasicUtil.isNotEmpty(keywords)){ params.put("keywords", keywords); } // filter conditions if(null != filters && !filters.isEmpty()){ String filter = ""; Iterator<String> keys = filters.keySet().iterator(); while(keys.hasNext()){ String key = keys.next(); String value = filters.get(key); if(BasicUtil.isEmpty(value)){ continue; } if("".equals(filter)){ filter = key + ":" + value; }else{ filter = filter + "+" + key + ":" + value; } } if(!"".equals(filter)){ params.put("filter", filter); } } if(BasicUtil.isNotEmpty(sortrule)){ params.put("sortrule", sortrule); } limit = NumberUtil.min(limit, 100); params.put("limit", limit+""); page = NumberUtil.max(page, 1); params.put("page", page+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); PageNavi navi = new PageNaviImpl(); navi.setCurPage(page); navi.setPageRows(limit); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("count")){ navi.setTotalRow(json.getInt("count")); } if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ log.warn("[around search failed][info:{}]",json.getString("info")); log.warn("[around search failed][params:{}]",BeanUtil.map2string(params)); set = new DataSet(); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[around search failed][error:{}]",e.getMessage()); e.printStackTrace(); set = new DataSet(); set.setException(e); } set.setNavi(navi); log.warn("[around search][size:{}]",navi.getTotalRow()); return set; } public DataSet around(String center, int radius, Map<String,String> filters, String sortrule, int limit, int page){ return around(center, radius, null, filters, sortrule, limit, page); } public DataSet around(String center, int radius, Map<String,String> filters, int limit, int page){ return around(center, radius, null, filters, null, limit, page); } public DataSet around(String center, int radius, Map<String,String> filters, int limit){ return around(center, radius, null, filters, null, limit, 1); } public DataSet around(String center, int radius, String keywords, String sortrule, int limit, int page){ Map<String,String> filter = new HashMap<String,String>(); return around(center, radius, keywords, filter, sortrule, limit, page); } public DataSet around(String center, int radius, String keywords, int limit, int page){ return around(center, radius, keywords, "", limit, page); } public DataSet around(String center, int radius, int limit, int page){ return around(center, radius, "", limit, page); } public DataSet around(String center, int radius, int limit){ return around(center, radius, "", limit, 1); } public DataSet around(String center, int radius){ return around(center, radius, "", 100, 1); } public DataSet around(String center){ return around(center, ConfigTable.getInt("AMAP_MAX_RADIUS")); } /** * Filtered search (can iterate over the whole table): retrieves rows from the given table (tableid) that match the filter conditions * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t5 * e.g. AmapUtil.getInstance(TABLE_TENANT).list("tenant_id:1","shop_id:1", 10, 1); * @param filter query conditions * filter=key1:value1+key2:[value2,value3] * filter=type:酒店+star:[3,5] is equivalent to the SQL: WHERE type = "酒店" AND star BETWEEN 3 AND 5 * @param sortrule sort rule * Sorting is supported on a user-chosen field (numeric fields only). 1: ascending, 0: descending * If no direction is given, ascending order is used by default. Example: sort by the age field ascending: sortrule = age:1 * @param limit at most 100 records per page * @param page current page number &gt;=1 * @return DataSet */ public DataSet
list(String filter, String sortrule, int limit, int page){ DataSet set = null; String url = "http://yuntuapi.amap.com/datamanage/data/list"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); if(BasicUtil.isNotEmpty(sortrule)){ params.put("sortrule", sortrule); } limit = NumberUtil.min(limit, 100); params.put("limit", limit+""); page = NumberUtil.max(page, 1); params.put("page", page+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); PageNavi navi = new PageNaviImpl(); navi.setCurPage(page); navi.setPageRows(limit); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("count")){ navi.setTotalRow(json.getInt("count")); } if(json.containsKey("datas")){ set = json.getSet("datas"); if(ConfigTable.isDebug() && log.isWarnEnabled()){ log.warn("[filtered search][result size:{}]",set.size()); } }else{ set = new DataSet(); log.warn("[filtered search failed][info:{}]",json.getString("info")); log.warn("[filtered search failed][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[filtered search failed][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } set.setNavi(navi); log.warn("[filtered search][size:{}]",navi.getTotalRow()); return set; } /** * ID search: within the given table (tableid), retrieves the details of the row with the given data id * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t4 * @param id id * @return DataRow */ public DataRow info(String id){ DataRow row = null; String url = "http://yuntuapi.amap.com/datasearch/id"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("_id", id); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ DataSet set = json.getSet("datas"); if(set.size() > 0){ row = set.getRow(0); } }else{ log.warn("[id search failed][info:{}]",json.getString("info")); log.warn("[id search failed][params:{}]",BeanUtil.map2string(params)); } }catch(Exception e){ log.warn("[id search failed][error:{}]",e.getMessage()); e.printStackTrace(); } return row; } /** * Province distribution statistics: for the given table (tableid), over the whole table or the rows matching the query/filter, returns the provinces (Chinese names) containing data together with their POI counts, sorted by count descending * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t6 * @param keywords keywords, required * @param country defaults to 中国 (China) when "" or null * @param filter filter conditions * @return DataSet */ public DataSet statByProvince(String keywords, String country, String filter){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/province"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); params.put("keywords", keywords); country = BasicUtil.evl(country, "中国")+""; params.put("country", country); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[distribution statistics failed][info:{}]",json.getString("info")); log.warn("[distribution statistics failed][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[distribution statistics failed][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * City distribution statistics:
for the given table (tableid), over the whole table or the rows matching the query/filter, returns the cities (Chinese names) containing data together with their POI counts, sorted by count descending * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t6 * @param keywords keywords, required * @param province defaults to 全国 (nationwide) when "" or null * @param filter filter conditions * @return DataSet */ public DataSet statByCity(String keywords, String province, String filter){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/city"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); params.put("keywords", keywords); province = BasicUtil.evl(province, "全国")+""; params.put("province", province); // was sent under the "country" key, apparently a copy-paste slip; the city-statistics API takes a province parameter String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[distribution statistics failed][info:{}]",json.getString("info")); log.warn("[distribution statistics failed][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[distribution statistics failed][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * District distribution statistics: for the given table (tableid), within the given province and city, over the whole table or the rows matching the query/filter, returns all district names (Chinese names) containing data together with their POI counts, sorted by count descending * API: http://lbs.amap.com/yuntu/reference/cloudsearch/#t6 * @param keywords keywords, required * @param province province * @param city city * @param filter filter conditions * @return DataSet */ public DataSet statByDistrict(String keywords, String province, String city, String filter){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/district"; // was ".../statistics/province", apparently copy-pasted; the district endpoint is assumed to follow the province/city naming pattern Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("tableid", this.table); params.put("filter", filter); params.put("keywords", keywords); params.put("province", province); params.put("city", city); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[distribution statistics failed][info:{}]",json.getString("info")); log.warn("[distribution statistics failed][params:{}]",BeanUtil.map2string(params)); set.setException(new Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[distribution statistics failed][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * Nearby search: around one center point, within a distance (straight-line or routed, at most 10 km) and a time window (at most 24 hours), finds users who uploaded their location, returning the user id, coordinates and the distance to the center point. * @param center center * @param radius radius * @param limit limit * @param timerange timerange * @return DataSet */ public DataSet nearby(String center, String radius, int limit, int timerange ){ DataSet set = null; String url = "http://yuntuapi.amap.com/datasearch/statistics/province"; // FIXME: this URL looks copy-pasted from the province-statistics endpoint and does not match the nearby-search API Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("center", center); params.put("radius", radius); params.put("searchtype", "0"); params.put("limit", NumberUtil.min(limit, 100)+""); params.put("timerange", BasicUtil.evl(timerange,"1800")+""); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.post(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); if(json.containsKey("datas")){ set = json.getSet("datas"); }else{ set = new DataSet(); log.warn("[nearby search failed][info:{}]",json.getString("info")); log.warn("[nearby search failed][params:{}]",BeanUtil.map2string(params)); set.setException(new
Exception(json.getString("info"))); } }catch(Exception e){ log.warn("[nearby search failed][error:{}]",e.getMessage()); set = new DataSet(); set.setException(e); } return set; } /** * Reverse geocoding: look up an address by coordinates * @param location longitude first, then latitude, separated by "," * @return DataRow */ public DataRow regeo(String location){ DataRow row = null; String url = "http://restapi.amap.com/v3/geocode/regeo"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("location", location); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); try{ row = DataRow.parseJson(txt); if(null != row){ row = row.getRow("regeocode"); if(null != row){ DataRow addressComponent = row.getRow("addressComponent"); if(null != addressComponent){ addressComponent.put("address", row.getString("formatted_address")); row = addressComponent; }else{ row.put("address", row.getString("formatted_address")); } } } }catch(Exception e){ e.printStackTrace(); } return row; } public DataRow regeo(String lon, String lat){ return regeo(lon+","+lat); } /** * Geocoding: look up coordinates by address * @param address address * @param city city * @return MapLocation */ public MapLocation geo(String address, String city){ MapLocation location = null; String url = "http://restapi.amap.com/v3/geocode/geo"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("address", address); if(BasicUtil.isNotEmpty(city)){ params.put("city", city); } String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); try{ DataRow json = DataRow.parseJson(txt); DataSet set = null; if(json.containsKey("geocodes")){ set = json.getSet("geocodes"); if(set.size()>0){ DataRow row = set.getRow(0); location = new MapLocation(row.getString("LOCATION")); location.setCode(row.getString("ADCODE")); location.setProvinceCode(BasicUtil.cut(row.getString("ADCODE"),0,4)); location.setProvinceNm(row.getString("PROVINCE")); location.setCityCode(row.getString("CITYCODE")); location.setCityNm(row.getString("CITY")); location.setCountyCode(row.getString("ADCODE")); location.setCountyNm(row.getString("DISTRICT")); location.setStreet(row.getString("STREET")); location.setAddress(row.getString("FORMATTED_ADDRESS")); location.setLevel(row.getString("LEVEL")); } }else{ log.warn("[geocode query failed][info:{}][params:{}]",json.getString("info"),BeanUtil.map2string(params)); } }catch(Exception e){ log.warn("[geocode query failed][error:{}]",e.getMessage()); } return location; } public MapLocation geo(String address){ return geo(address, null); } /** * Driving route planning * http://lbs.amap.com/api/webservice/guide/api/direction#driving * @param origin origin (start point) * @param destination destination * @param points waypoints, at most 16, separated by ";" * @param strategy routing strategy 0: fastest route ignoring real-time traffic (not necessarily the shortest) * 1: least time while avoiding toll roads * 2: shortest distance ignoring traffic (may pass through side roads or residential areas) * @return DataRow */ @SuppressWarnings({ "rawtypes", "unchecked" }) public DataRow directionDrive(String origin, String destination, String points, int strategy){ DataRow row = null; String url = "http://restapi.amap.com/v3/direction/driving"; Map<String,Object> params = new HashMap<String,Object>(); params.put("key", this.key); params.put("origin", origin); params.put("destination", destination); params.put("strategy", strategy+""); if(BasicUtil.isNotEmpty(points)){ params.put("points", points); } String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); try{ row = DataRow.parseJson(txt); DataRow route =
row.getRow("route"); if(null != route){ List paths = route.getList("PATHS"); if(paths.size()>0){ DataRow path = (DataRow)paths.get(0); row = path; List<DataRow> steps = (List<DataRow>)path.getList("steps"); List<String> polylines = new ArrayList<>(); for(DataRow step:steps){ String polyline = step.getString("polyline"); String[] tmps = polyline.split(";"); for(String tmp:tmps){ polylines.add(tmp); } } row.put("polylines", polylines); } } }catch(Exception e){ log.warn("[route planning failed][error:{}]",e.getMessage()); } return row; } public DataRow directionDrive(String origin, String destination){ return directionDrive(origin, destination, null, 0); } public DataRow directionDrive(String origin, String destination, String points){ return directionDrive(origin, destination, points, 0); } @SuppressWarnings("unchecked") public DataSet poi(String city, String keywords){ DataSet set = new DataSet(); String url = "https://restapi.amap.com/v5/place/text"; Map<String,Object> params = new HashMap<String,Object>(); params.put("city", city); params.put("keywords", keywords); params.put("page","1"); params.put("offset","20"); DataRow row = api(url,params); if(row.getInt("status",0)==1){ List<DataRow> items = (List<DataRow>)row.get("POIS"); for(DataRow item:items){ set.add(item); } } return set; } public DataRow api(String url, Map<String,Object> params){ params.put("key", this.key); String sign = sign(params); params.put("sig", sign); String txt = HttpUtil.get(url, "UTF-8", params).getText(); DataRow row = null; try { row = DataRow.parseJson(txt); }catch (Exception e){ row = new DataRow(); row.put("status",0); row.put("info", e.getMessage()); e.printStackTrace(); } return row; } /** * Sign the request parameters: MD5 digest over the serialized parameter map concatenated with the private key * @param params params * @return String */ public String sign(Map<String,Object> params){ String sign = BeanUtil.map2string(params) + this.privateKey; sign = MD5Util.sign(sign,"UTF-8"); return sign; } }
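For orientation, a minimal usage sketch of the class above (not part of the original file): the key, private key, table id, field names and coordinates below are placeholders, not real values.

import java.util.HashMap;
import java.util.Map;
import org.anyline.amap.util.AmapUtil;
import org.anyline.entity.DataSet;

public class AmapUtilDemo {
    public static void main(String[] args) {
        // Build an uncached instance from explicit credentials (placeholders).
        AmapUtil util = AmapUtil.getInstance("your-key", "your-private-key", "your-table-id");

        // Create a marker by coordinates; extra columns travel in the extras map.
        Map<String, Object> extras = new HashMap<String, Object>();
        extras.put("shop_id", "1"); // hypothetical custom column
        String id = util.create("demo-poi", "116.397428", "39.90923", extras);

        // Search within 1000 m of a center point, 20 rows, first page.
        DataSet around = util.around("116.397428,39.90923", 1000, 20, 1);
        System.out.println("found: " + around.size());

        // Clean up the marker we just created.
        util.delete(id);
    }
}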
AmapUtil
anyline-amap/src/main/java/org/anyline/amap/util/AmapUtil.java
AmapUtil
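The sig parameter computed by sign() in the file above is an MD5 digest of the serialized parameter map concatenated with the private key. The exact serialization lives in BeanUtil.map2string, which is not shown here; the standalone sketch below assumes the documented Amap convention of sorting parameters by key and joining them as key=value pairs with '&', which may differ from BeanUtil's actual output.

import java.security.MessageDigest;
import java.util.Map;
import java.util.TreeMap;

public class AmapSignSketch {
    // Assumption: params are sorted by key and joined as k=v&k=v before hashing;
    // verify against BeanUtil.map2string before relying on this.
    public static String sign(Map<String, Object> params, String privateKey) throws Exception {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, Object> e : new TreeMap<String, Object>(params).entrySet()) {
            if (sb.length() > 0) {
                sb.append('&');
            }
            sb.append(e.getKey()).append('=').append(e.getValue());
        }
        sb.append(privateKey); // private key is appended after the serialized params
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] digest = md5.digest(sb.toString().getBytes("UTF-8"));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b)); // lowercase hex, as MD5 signatures usually are
        }
        return hex.toString();
    }
}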
Java
apache-2.0
5d0a91b81dd8e33639e506b85876b6e169175ecb
0
jbonofre/beam,chamikaramj/beam,tweise/beam,apache/beam,chamikaramj/beam,tgroh/beam,tweise/incubator-beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,manuzhang/beam,amitsela/incubator-beam,eljefe6a/incubator-beam,charlesccychen/beam,apache/beam,apache/beam,tgroh/beam,xsm110/Apache-Beam,manuzhang/beam,chamikaramj/beam,jasonkuster/incubator-beam,robertwb/incubator-beam,jasonkuster/incubator-beam,robertwb/incubator-beam,manuzhang/incubator-beam,lukecwik/incubator-beam,ravwojdyla/incubator-beam,vikkyrk/incubator-beam,lukecwik/incubator-beam,yk5/beam,jbonofre/beam,rangadi/beam,robertwb/incubator-beam,wangyum/beam,manuzhang/beam,jbonofre/incubator-beam,tgroh/incubator-beam,markflyhigh/incubator-beam,jasonkuster/beam,apache/beam,lukecwik/incubator-beam,rangadi/beam,xsm110/Apache-Beam,chamikaramj/beam,rangadi/beam,charlesccychen/incubator-beam,tweise/beam,sammcveety/incubator-beam,amitsela/incubator-beam,robertwb/incubator-beam,yk5/beam,lukecwik/incubator-beam,rangadi/incubator-beam,chamikaramj/beam,wangyum/beam,robertwb/incubator-beam,iemejia/incubator-beam,dhalperi/incubator-beam,dhalperi/beam,apache/beam,ravwojdyla/incubator-beam,lukecwik/incubator-beam,charlesccychen/incubator-beam,RyanSkraba/beam,mxm/incubator-beam,amitsela/beam,chamikaramj/incubator-beam,apache/beam,mxm/incubator-beam,dhalperi/beam,tgroh/incubator-beam,eljefe6a/incubator-beam,markflyhigh/incubator-beam,sammcveety/incubator-beam,rangadi/beam,peihe/incubator-beam,chamikaramj/beam,charlesccychen/incubator-beam,staslev/incubator-beam,iemejia/incubator-beam,dhalperi/beam,sammcveety/incubator-beam,wtanaka/beam,rangadi/incubator-beam,jbonofre/beam,charlesccychen/beam,wtanaka/beam,peihe/incubator-beam,rangadi/incubator-beam,RyanSkraba/beam,rangadi/beam,lukecwik/incubator-beam,rangadi/beam,markflyhigh/incubator-beam,lukecwik/incubator-beam,vikkyrk/incubator-beam,wangyum/beam,amitsela/beam,robertwb/incubator-beam,charlesccychen/beam,charlesccychen/beam,charlesccychen/beam,rangadi/beam,markflyhigh/incubator-beam,staslev/beam,manuzhang/incubator-beam,joshualitt/incubator-beam,tweise/incubator-beam,charlesccychen/beam,chamikaramj/beam,chamikaramj/beam,ravwojdyla/incubator-beam,vikkyrk/incubator-beam,peihe/incubator-beam,apache/beam,RyanSkraba/beam,jasonkuster/beam,amarouni/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,dhalperi/incubator-beam,RyanSkraba/beam,yk5/beam,amitsela/beam,markflyhigh/incubator-beam,apache/beam,josauder/AOP_incubator_beam,jbonofre/beam,lukecwik/incubator-beam,staslev/beam,RyanSkraba/beam,apache/beam,chamikaramj/beam,staslev/beam,RyanSkraba/beam,RyanSkraba/beam,chamikaramj/incubator-beam,wangyum/beam,joshualitt/incubator-beam,apache/beam,wtanaka/beam,jasonkuster/beam,staslev/incubator-beam,tgroh/beam,xsm110/Apache-Beam,markflyhigh/incubator-beam,charlesccychen/beam,tgroh/beam,josauder/AOP_incubator_beam,robertwb/incubator-beam,eljefe6a/incubator-beam,amarouni/incubator-beam,apache/beam,robertwb/incubator-beam,chamikaramj/beam,robertwb/incubator-beam,jbonofre/incubator-beam
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.util; import static com.google.cloud.dataflow.sdk.util.Structs.addList; import com.google.api.client.util.Base64; import com.google.cloud.dataflow.sdk.coders.Coder; import com.google.cloud.dataflow.sdk.coders.CoderException; import com.google.cloud.dataflow.sdk.coders.IterableCoder; import com.google.cloud.dataflow.sdk.coders.KvCoder; import com.google.cloud.dataflow.sdk.coders.KvCoderBase; import com.google.cloud.dataflow.sdk.coders.MapCoder; import com.google.cloud.dataflow.sdk.coders.MapCoderBase; import com.google.cloud.dataflow.sdk.values.TypeDescriptor; import com.google.common.base.Throwables; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo.As; import com.fasterxml.jackson.annotation.JsonTypeInfo.Id; import com.fasterxml.jackson.databind.DatabindContext; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver; import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.type.TypeFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.ref.SoftReference; import java.lang.reflect.ParameterizedType; import java.lang.reflect.TypeVariable; /** * Utilities for working with Coders. */ public final class CoderUtils { private CoderUtils() {} // Non-instantiable /** * Coder class-name alias for a key-value type. */ public static final String KIND_PAIR = "kind:pair"; /** * Coder class-name alias for a stream type. */ public static final String KIND_STREAM = "kind:stream"; private static ThreadLocal<SoftReference<ExposedByteArrayOutputStream>> threadLocalOutputStream = new ThreadLocal<>(); /** * If true, a call to {@code encodeToByteArray} is already on the call stack. */ private static ThreadLocal<Boolean> threadLocalOutputStreamInUse = new ThreadLocal<Boolean>() { @Override protected Boolean initialValue() { return false; } }; /** * Encodes the given value using the specified Coder, and returns * the encoded bytes. * * <p>This function is not reentrant; it should not be called from methods of the provided * {@link Coder}. */ public static <T> byte[] encodeToByteArray(Coder<T> coder, T value) throws CoderException { return encodeToByteArray(coder, value, Coder.Context.OUTER); } public static <T> byte[] encodeToByteArray(Coder<T> coder, T value, Coder.Context context) throws CoderException { if (threadLocalOutputStreamInUse.get()) { // encodeToByteArray() is called recursively and the thread local stream is in use, // allocating a new one. 
ByteArrayOutputStream stream = new ExposedByteArrayOutputStream(); encodeToSafeStream(coder, value, stream, context); return stream.toByteArray(); } else { threadLocalOutputStreamInUse.set(true); try { ByteArrayOutputStream stream = getThreadLocalOutputStream(); encodeToSafeStream(coder, value, stream, context); return stream.toByteArray(); } finally { threadLocalOutputStreamInUse.set(false); } } } /** * Encodes {@code value} to the given {@code stream}, which should be a stream that never throws * {@code IOException}, such as {@code ByteArrayOutputStream} or * {@link ExposedByteArrayOutputStream}. */ private static <T> void encodeToSafeStream( Coder<T> coder, T value, OutputStream stream, Coder.Context context) throws CoderException { try { coder.encode(value, new UnownedOutputStream(stream), context); } catch (IOException exn) { Throwables.propagateIfPossible(exn, CoderException.class); throw new IllegalArgumentException( "Forbidden IOException when writing to OutputStream", exn); } } /** * Decodes the given bytes using the specified Coder, and returns * the resulting decoded value. */ public static <T> T decodeFromByteArray(Coder<T> coder, byte[] encodedValue) throws CoderException { return decodeFromByteArray(coder, encodedValue, Coder.Context.OUTER); } public static <T> T decodeFromByteArray( Coder<T> coder, byte[] encodedValue, Coder.Context context) throws CoderException { try (ExposedByteArrayInputStream stream = new ExposedByteArrayInputStream(encodedValue)) { T result = decodeFromSafeStream(coder, stream, context); if (stream.available() != 0) { throw new CoderException( stream.available() + " unexpected extra bytes after decoding " + result); } return result; } } /** * Decodes a value from the given {@code stream}, which should be a stream that never throws * {@code IOException}, such as {@code ByteArrayInputStream} or * {@link ExposedByteArrayInputStream}. */ private static <T> T decodeFromSafeStream( Coder<T> coder, InputStream stream, Coder.Context context) throws CoderException { try { return coder.decode(new UnownedInputStream(stream), context); } catch (IOException exn) { Throwables.propagateIfPossible(exn, CoderException.class); throw new IllegalArgumentException( "Forbidden IOException when reading from InputStream", exn); } } private static ByteArrayOutputStream getThreadLocalOutputStream() { SoftReference<ExposedByteArrayOutputStream> refStream = threadLocalOutputStream.get(); ExposedByteArrayOutputStream stream = refStream == null ? null : refStream.get(); if (stream == null) { stream = new ExposedByteArrayOutputStream(); threadLocalOutputStream.set(new SoftReference<>(stream)); } stream.reset(); return stream; } /** * Clones the given value by encoding and then decoding it with the specified Coder. * * <p>This function is not reentrant; it should not be called from methods of the provided * {@link Coder}. */ public static <T> T clone(Coder<T> coder, T value) throws CoderException { return decodeFromByteArray(coder, encodeToByteArray(coder, value, Coder.Context.OUTER)); } /** * Encodes the given value using the specified Coder, and returns the Base64 encoding of the * encoded bytes. * * @throws CoderException if there are errors during encoding. */ public static <T> String encodeToBase64(Coder<T> coder, T value) throws CoderException { byte[] rawValue = encodeToByteArray(coder, value); return Base64.encodeBase64URLSafeString(rawValue); } /** * Parses a value from a base64-encoded String using the given coder. 
*/ public static <T> T decodeFromBase64(Coder<T> coder, String encodedValue) throws CoderException { return decodeFromSafeStream( coder, new ByteArrayInputStream(Base64.decodeBase64(encodedValue)), Coder.Context.OUTER); } /** * If {@code coderType} is a subclass of {@code Coder<T>} for a specific * type {@code T}, returns {@code T.class}. */ @SuppressWarnings({"rawtypes", "unchecked"}) public static TypeDescriptor getCodedType(TypeDescriptor coderDescriptor) { ParameterizedType coderType = (ParameterizedType) coderDescriptor.getSupertype(Coder.class).getType(); TypeDescriptor codedType = TypeDescriptor.of(coderType.getActualTypeArguments()[0]); return codedType; } public static CloudObject makeCloudEncoding( String type, CloudObject... componentSpecs) { CloudObject encoding = CloudObject.forClassName(type); if (componentSpecs.length > 0) { addList(encoding, PropertyNames.COMPONENT_ENCODINGS, componentSpecs); } return encoding; } /** * A {@link com.fasterxml.jackson.databind.Module} that adds the type * resolver needed for Coder definitions created by the Dataflow service. */ static final class Jackson2Module extends SimpleModule { /** * The Coder custom type resolver. * * <p>This resolver resolves coders. If the Coder ID is a particular * well-known identifier supplied by the Dataflow service, it's replaced * with the corresponding class. All other Coder instances are resolved * by class name, using the package com.google.cloud.dataflow.sdk.coders * if there are no "."s in the ID. */ private static final class Resolver extends TypeIdResolverBase { @SuppressWarnings("unused") // Used via @JsonTypeIdResolver annotation on Mixin public Resolver() { super(TypeFactory.defaultInstance().constructType(Coder.class), TypeFactory.defaultInstance()); } @Deprecated @Override public JavaType typeFromId(String id) { return typeFromId(null, id); } @Override public JavaType typeFromId(DatabindContext context, String id) { Class<?> clazz = getClassForId(id); if (clazz == KvCoder.class) { clazz = KvCoderBase.class; } if (clazz == MapCoder.class) { clazz = MapCoderBase.class; } @SuppressWarnings("rawtypes") TypeVariable[] tvs = clazz.getTypeParameters(); JavaType[] types = new JavaType[tvs.length]; for (int lupe = 0; lupe < tvs.length; lupe++) { types[lupe] = TypeFactory.unknownType(); } return _typeFactory.constructSimpleType(clazz, types); } private Class<?> getClassForId(String id) { try { if (id.contains(".")) { return Class.forName(id); } if (id.equals(KIND_STREAM)) { return IterableCoder.class; } else if (id.equals(KIND_PAIR)) { return KvCoder.class; } // Otherwise, see if the ID is the name of a class in // com.google.cloud.dataflow.sdk.coders. We do this via creating // the class object so that class loaders have a chance to get // involved -- and since we need the class object anyway. return Class.forName(Coder.class.getPackage().getName() + "." + id); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to convert coder ID " + id + " to class", e); } } @Override public String idFromValueAndType(Object o, Class<?> clazz) { return clazz.getName(); } @Override public String idFromValue(Object o) { return o.getClass().getName(); } @Override public JsonTypeInfo.Id getMechanism() { return JsonTypeInfo.Id.CUSTOM; } } /** * The mixin class defining how Coders are handled by the deserialization * {@link ObjectMapper}. * * <p>This is done via a mixin so that this resolver is <i>only</i> used * during deserialization requested by the Dataflow SDK. 
*/ @JsonTypeIdResolver(Resolver.class) @JsonTypeInfo(use = Id.CUSTOM, include = As.PROPERTY, property = PropertyNames.OBJECT_TYPE_NAME) private static final class Mixin {} public Jackson2Module() { super("DataflowCoders"); setMixInAnnotation(Coder.class, Mixin.class); } } }
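A small round-trip sketch for the utility above, assuming the SDK's StringUtf8Coder is available; it exercises only encodeToByteArray, decodeFromByteArray and clone as defined in this file.

import com.google.cloud.dataflow.sdk.coders.StringUtf8Coder;
import com.google.cloud.dataflow.sdk.util.CoderUtils;

public class CoderUtilsDemo {
    public static void main(String[] args) throws Exception {
        StringUtf8Coder coder = StringUtf8Coder.of();
        // Encode with the outer context (the default for the single-argument overload).
        byte[] bytes = CoderUtils.encodeToByteArray(coder, "hello");
        // Decoding must consume every byte, or decodeFromByteArray throws a CoderException.
        String decoded = CoderUtils.decodeFromByteArray(coder, bytes);
        // clone() is just an encode/decode round trip.
        String copy = CoderUtils.clone(coder, "hello");
        System.out.println(decoded.equals(copy)); // true
    }
}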
sdk/src/main/java/com/google/cloud/dataflow/sdk/util/CoderUtils.java
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.util; import static com.google.cloud.dataflow.sdk.util.Structs.addList; import com.google.api.client.util.Base64; import com.google.cloud.dataflow.sdk.coders.Coder; import com.google.cloud.dataflow.sdk.coders.CoderException; import com.google.cloud.dataflow.sdk.coders.IterableCoder; import com.google.cloud.dataflow.sdk.coders.KvCoder; import com.google.cloud.dataflow.sdk.coders.KvCoderBase; import com.google.cloud.dataflow.sdk.coders.MapCoder; import com.google.cloud.dataflow.sdk.coders.MapCoderBase; import com.google.cloud.dataflow.sdk.values.TypeDescriptor; import com.google.common.base.Throwables; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo.As; import com.fasterxml.jackson.annotation.JsonTypeInfo.Id; import com.fasterxml.jackson.databind.DatabindContext; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver; import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.type.TypeFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.ref.SoftReference; import java.lang.reflect.ParameterizedType; import java.lang.reflect.TypeVariable; /** * Utilities for working with Coders. */ public final class CoderUtils { private CoderUtils() {} // Non-instantiable /** * Coder class-name alias for a key-value type. */ public static final String KIND_PAIR = "kind:pair"; /** * Coder class-name alias for a stream type. */ public static final String KIND_STREAM = "kind:stream"; private static ThreadLocal<SoftReference<ExposedByteArrayOutputStream>> threadLocalOutputStream = new ThreadLocal<>(); /** * If true, a call to {@code encodeToByteArray} is already on the call stack. */ private static ThreadLocal<Boolean> threadLocalOutputStreamInUse = new ThreadLocal<Boolean>() { @Override protected Boolean initialValue() { return false; } }; /** * Encodes the given value using the specified Coder, and returns * the encoded bytes. * * <p>This function is not reentrant; it should not be called from methods of the provided * {@link Coder}. */ public static <T> byte[] encodeToByteArray(Coder<T> coder, T value) throws CoderException { return encodeToByteArray(coder, value, Coder.Context.OUTER); } public static <T> byte[] encodeToByteArray(Coder<T> coder, T value, Coder.Context context) throws CoderException { if (threadLocalOutputStreamInUse.get()) { // encodeToByteArray() is called recursively and the thread local stream is in use, // allocating a new one. 
ByteArrayOutputStream stream = new ExposedByteArrayOutputStream(); encodeToSafeStream(coder, value, stream, context); return stream.toByteArray(); } else { threadLocalOutputStreamInUse.set(true); try { ByteArrayOutputStream stream = getThreadLocalOutputStream(); encodeToSafeStream(coder, value, stream, context); return stream.toByteArray(); } finally { threadLocalOutputStreamInUse.set(false); } } } /** * Encodes {@code value} to the given {@code stream}, which should be a stream that never throws * {@code IOException}, such as {@code ByteArrayOutputStream} or * {@link ExposedByteArrayOutputStream}. */ private static <T> void encodeToSafeStream( Coder<T> coder, T value, OutputStream stream, Coder.Context context) throws CoderException { try { coder.encode(value, new UnownedOutputStream(stream), context); } catch (IOException exn) { Throwables.propagateIfPossible(exn, CoderException.class); throw new IllegalArgumentException( "Forbidden IOException when writing to OutputStream", exn); } } /** * Decodes the given bytes using the specified Coder, and returns * the resulting decoded value. */ public static <T> T decodeFromByteArray(Coder<T> coder, byte[] encodedValue) throws CoderException { return decodeFromByteArray(coder, encodedValue, Coder.Context.OUTER); } public static <T> T decodeFromByteArray( Coder<T> coder, byte[] encodedValue, Coder.Context context) throws CoderException { try (ExposedByteArrayInputStream stream = new ExposedByteArrayInputStream(encodedValue)) { T result = decodeFromSafeStream(coder, stream, context); if (stream.available() != 0) { throw new CoderException( stream.available() + " unexpected extra bytes after decoding " + result); } return result; } } /** * Decodes a value from the given {@code stream}, which should be a stream that never throws * {@code IOException}, such as {@code ByteArrayInputStream} or * {@link ExposedByteArrayInputStream}. */ private static <T> T decodeFromSafeStream( Coder<T> coder, InputStream stream, Coder.Context context) throws CoderException { try { return coder.decode(new UnownedInputStream(stream), context); } catch (IOException exn) { Throwables.propagateIfPossible(exn, CoderException.class); throw new IllegalArgumentException( "Forbidden IOException when reading from InputStream", exn); } } private static ByteArrayOutputStream getThreadLocalOutputStream() { SoftReference<ExposedByteArrayOutputStream> refStream = threadLocalOutputStream.get(); ExposedByteArrayOutputStream stream = refStream == null ? null : refStream.get(); if (stream == null) { stream = new ExposedByteArrayOutputStream(); threadLocalOutputStream.set(new SoftReference<>(stream)); } stream.reset(); return stream; } /** * Clones the given value by encoding and then decoding it with the specified Coder. * * <p>This function is not reentrant; it should not be called from methods of the provided * {@link Coder}. */ public static <T> T clone(Coder<T> coder, T value) throws CoderException { return decodeFromByteArray(coder, encodeToByteArray(coder, value, Coder.Context.OUTER)); } /** * Encodes the given value using the specified Coder, and returns the Base64 encoding of the * encoded bytes. * * @throws CoderException if there are errors during encoding. */ public static <T> String encodeToBase64(Coder<T> coder, T value) throws CoderException { byte[] rawValue = encodeToByteArray(coder, value); return Base64.encodeBase64URLSafeString(rawValue); } /** * Parses a value from a base64-encoded String using the given coder. 
*/ public static <T> T decodeFromBase64(Coder<T> coder, String encodedValue) throws CoderException { return decodeFromSafeStream( coder, new ByteArrayInputStream(Base64.decodeBase64(encodedValue)), Coder.Context.OUTER); } /** * If {@code coderType} is a subclass of {@code Coder<T>} for a specific * type {@code T}, returns {@code T.class}. */ @SuppressWarnings({"rawtypes", "unchecked"}) public static TypeDescriptor getCodedType(TypeDescriptor coderDescriptor) { ParameterizedType coderType = (ParameterizedType) coderDescriptor.getSupertype(Coder.class).getType(); TypeDescriptor codedType = TypeDescriptor.of(coderType.getActualTypeArguments()[0]); return codedType; } public static CloudObject makeCloudEncoding( String type, CloudObject... componentSpecs) { CloudObject encoding = CloudObject.forClassName(type); if (componentSpecs.length > 0) { addList(encoding, PropertyNames.COMPONENT_ENCODINGS, componentSpecs); } return encoding; } /** * A {@link com.fasterxml.jackson.databind.Module} that adds the type * resolver needed for Coder definitions created by the Dataflow service. */ static final class Jackson2Module extends SimpleModule { /** * The Coder custom type resolver. * * <p>This resolver resolves coders. If the Coder ID is a particular * well-known identifier supplied by the Dataflow service, it's replaced * with the corresponding class. All other Coder instances are resolved * by class name, using the package com.google.cloud.dataflow.sdk.coders * if there are no "."s in the ID. */ private static final class Resolver extends TypeIdResolverBase { @SuppressWarnings("unused") // Used via @JsonTypeIdResolver annotation on Mixin public Resolver() { super(TypeFactory.defaultInstance().constructType(Coder.class), TypeFactory.defaultInstance()); } @Deprecated @Override public JavaType typeFromId(String id) { return typeFromId(null, id); } @Override public JavaType typeFromId(DatabindContext context, String id) { Class<?> clazz = getClassForId(id); if (clazz == KvCoder.class) { clazz = KvCoderBase.class; } if (clazz == MapCoder.class) { clazz = MapCoderBase.class; } @SuppressWarnings("rawtypes") TypeVariable[] tvs = clazz.getTypeParameters(); JavaType[] types = new JavaType[tvs.length]; for (int lupe = 0; lupe < tvs.length; lupe++) { types[lupe] = TypeFactory.unknownType(); } return _typeFactory.constructSimpleType(clazz, types); } private Class<?> getClassForId(String id) { try { if (id.contains(".")) { return Class.forName(id); } if (id.equals(KIND_STREAM)) { return IterableCoder.class; } else if (id.equals(KIND_PAIR)) { return KvCoder.class; } // Otherwise, see if the ID is the name of a class in // com.google.cloud.dataflow.sdk.coders. We do this via creating // the class object so that class loaders have a chance to get // involved -- and since we need the class object anyway. return Class.forName("com.google.cloud.dataflow.sdk.coders." + id); } catch (ClassNotFoundException e) { throw new RuntimeException("Unable to convert coder ID " + id + " to class", e); } } @Override public String idFromValueAndType(Object o, Class<?> clazz) { return clazz.getName(); } @Override public String idFromValue(Object o) { return o.getClass().getName(); } @Override public JsonTypeInfo.Id getMechanism() { return JsonTypeInfo.Id.CUSTOM; } } /** * The mixin class defining how Coders are handled by the deserialization * {@link ObjectMapper}. * * <p>This is done via a mixin so that this resolver is <i>only</i> used * during deserialization requested by the Dataflow SDK. 
*/ @JsonTypeIdResolver(Resolver.class) @JsonTypeInfo(use = Id.CUSTOM, include = As.PROPERTY, property = PropertyNames.OBJECT_TYPE_NAME) private static final class Mixin {} public Jackson2Module() { super("DataflowCoders"); setMixInAnnotation(Coder.class, Mixin.class); } } }
This closes #20
sdk/src/main/java/com/google/cloud/dataflow/sdk/util/CoderUtils.java
This closes #20
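The only functional difference between the new and old versions of this file is inside getClassForId, where the default coder package is now derived from the Coder class itself instead of being hard-coded, so the lookup survives a package rename:

// Before (old contents): the coder package was hard-coded.
return Class.forName("com.google.cloud.dataflow.sdk.coders." + id);

// After (new contents): the package is derived from the Coder class.
return Class.forName(Coder.class.getPackage().getName() + "." + id);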
Java
apache-2.0
c1f6707df3d67703647710178d0df2fd31518010
0
mertakdut/EpubParser
package com.github.mertakdut; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.commons.codec.binary.Base64; import org.xml.sax.SAXException; import com.github.mertakdut.BaseFindings.XmlItem; import com.github.mertakdut.Package.Metadata; import com.github.mertakdut.exception.OutOfPagesException; import com.github.mertakdut.exception.ReadingException; class Content { private Logger logger; private String zipFilePath; private Container container; private Package opfPackage; private Toc toc; private List<String> entryNames; private Map<String, List<Tag>> entryTagPositions; private List<String> nonExistingHrefList; private int playOrder; // private int maxContentPerSection; // String length. private BookSection lastBookSectionInfo; public Content() { logger = new Logger(); entryNames = new ArrayList<>(); container = new Container(); opfPackage = new Package(); toc = new Toc(); } // Debug public void print() { System.out.println("Printing zipEntryNames...\n"); for (int i = 0; i < entryNames.size(); i++) { System.out.println("(" + i + ")" + entryNames.get(i)); } getContainer().print(); getPackage().print(); getToc().print(); } // public BookSection getNextBookSection() throws ReadingException { // NavPoint navPoint = getNavPoint(this.playOrder++); // return prepareBookSection(navPoint, this.playOrder); // } // // public BookSection getPrevBookSection() throws ReadingException { // NavPoint navPoint = getNavPoint(this.playOrder--); // return prepareBookSection(navPoint, this.playOrder); // } BookSection getBookSection(int index) throws ReadingException, OutOfPagesException { BookSection bookSection = null; int orderDiff = index - this.playOrder; while (orderDiff > 0) { // Out of order. Calculate the ones before first. calculateBookSection(--orderDiff); } NavPoint navPoint = getNavPoint(index); if (Optionals.maxContentPerSection == 0 || navPoint.getTypeCode() == 0 || navPoint.getTypeCode() == 1) { // Real navPoint - actual file/anchor. // logger.log(Severity.info, "\nindex: " + index + ", Real(at least for now...) navPoint"); bookSection = prepareBookSection(navPoint, index); } else { // Pseudo navPoint - trimmed file entry. // logger.log(Severity.info, "\nindex: " + index + ", Pseudo navPoint"); bookSection = prepareTrimmedBookSection(navPoint, index); } this.playOrder++; return bookSection; } private NavPoint getNavPoint(int index) throws ReadingException, OutOfPagesException { if (index >= 0) { if (getToc() != null) { List<NavPoint> navPoints = getToc().getNavMap().getNavPoints(); if (index >= navPoints.size()) { throw new OutOfPagesException("Out of bounds at position: " + index); } return navPoints.get(index); } else { throw new ReadingException("Table of Contents is null."); } } else { throw new ReadingException("Index can't be less than 0"); } } // TODO: A new method for only calculating book sections. This will also be useful for pre-loading the whole book.
private void calculateBookSection(int index) throws ReadingException, OutOfPagesException { NavPoint navPoint = getNavPoint(index); if (Optionals.maxContentPerSection == 0 || navPoint.getTypeCode() == 0 || navPoint.getTypeCode() == 1) { // Real navPoint - actual file/anchor. // logger.log(Severity.info, "\nindex: " + index + ", Real(at least for now...) navPoint"); prepareBookSection(navPoint, index); } else { // Pseudo navPoint - trimmed file entry. // logger.log(Severity.info, "\nindex: " + index + ", Pseudo navPoint"); prepareTrimmedBookSection(navPoint, index); } } private BookSection prepareBookSection(NavPoint navPoint, int index) throws ReadingException, OutOfPagesException { BookSection bookSection = new BookSection(); int entryStartPosition = navPoint.getBodyTrimStartPosition(); int entryEndPosition = navPoint.getBodyTrimEndPosition(); String entryEntryName = navPoint.getEntryName(); String fileContentStr = null; String htmlBody = null; String htmlBodyToReplace = null; // Warning: This is not always working as the content is calculated before. Calculated content may have its entryStartPosition and entryEndPosition 0(Zero). e.g. when no trim needed on htmlBody. if (entryStartPosition == 0 && entryEndPosition == 0) { // Not calculated before. String[] entryNameAndLabel = findEntryNameAndLabel(navPoint); String href = entryNameAndLabel[0]; String label = entryNameAndLabel[1]; String currentAnchor = null; String nextAnchor = null; int trimStartPosition = 0; int trimEndPosition = 0; boolean isSourceFileFound = false; for (int i = 0; i < getEntryNames().size(); i++) { String entryName = getEntryNames().get(i); String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (href.equals(fileName) || (href.startsWith(fileName) && href.replace(fileName, "").startsWith("%23"))) { isSourceFileFound = true; fileContentStr = readFileContent(entryName); htmlBody = getHtmlBody(fileContentStr); // This must not be changed. // entryTagPositions only used in either in trimming or including text content. if ((Optionals.maxContentPerSection != 0 && Optionals.maxContentPerSection < htmlBody.length()) || Optionals.isIncludingTextContent) { // Calculate the tag positions of the current entry, if it hasn't done before. if (entryTagPositions == null || !entryTagPositions.containsKey(entryName)) { if (entryTagPositions == null) { entryTagPositions = new HashMap<>(); } calculateEntryTagPositions(entryName, htmlBody); } } if (!href.equals(fileName)) { // Anchored, e.g. #pgepubid00058 boolean isFileReadFirstTime = isFileReadFirstTime(index, entryName); if (isFileReadFirstTime) { // No previous anchor; so it should start from the beginning to the current anchor. NavPoint currentEntryNavPoint = new NavPoint(); currentEntryNavPoint.setTypeCode(0); currentEntryNavPoint.setContentSrc(fileName); // href or fileName? getToc().getNavMap().getNavPoints().add(index, currentEntryNavPoint); nextAnchor = href.replace(fileName, ""); } else { currentAnchor = href.replace(fileName, ""); nextAnchor = getNextAnchor(index, entryName); } } if (currentAnchor != null || nextAnchor != null) { // Splitting the file by anchors. currentAnchor = convertAnchorToHtml(currentAnchor); nextAnchor = convertAnchorToHtml(nextAnchor); if (currentAnchor != null && nextAnchor != null) { int currentAnchorIndex = htmlBody.indexOf(currentAnchor); int nextAnchorIndex = htmlBody.indexOf(nextAnchor); // Abnormality in toc.ncx file. Its order is probably given wrong. 
// Warning: This may break the navPoints order if all the order is malformed. if (currentAnchorIndex > nextAnchorIndex) { int tmp = currentAnchorIndex; currentAnchorIndex = nextAnchorIndex; nextAnchorIndex = tmp; Collections.swap(getToc().getNavMap().getNavPoints(), index, index + 1); } if (currentAnchorIndex == -1 || nextAnchorIndex == -1) { int tmpIndex = index; if (currentAnchorIndex == -1 && nextAnchorIndex == -1) { // Both of the anchors not found. getToc().getNavMap().getNavPoints().get(tmpIndex++).setMarkedToDelete(true); // Delete the first one (current anchor) getToc().getNavMap().getNavPoints().get(tmpIndex++).setMarkedToDelete(true); // Delete the second one (next anchor) currentAnchor = null; nextAnchor = null; } else if (currentAnchorIndex == -1) { // Current anchor not found. getToc().getNavMap().getNavPoints().get(tmpIndex++).setMarkedToDelete(true); // Delete the first one (current anchor) currentAnchor = nextAnchor; } else if (nextAnchorIndex == -1) { // Next anchor not found. getToc().getNavMap().getNavPoints().get(++tmpIndex).setMarkedToDelete(true); // Delete the second one (next anchor) nextAnchor = null; } int markedNavPoints = tmpIndex - index; // Next available anchor should be the next starting point. while (tmpIndex < getToc().getNavMap().getNavPoints().size()) { // Looping until next anchor is found. boolean isCurrentNavPointMarked = true; NavPoint possiblyNextNavPoint = getNavPoint(tmpIndex); String[] possiblyNextEntryNameLabel = findEntryNameAndLabel(possiblyNextNavPoint); String possiblyNextEntryName = possiblyNextEntryNameLabel[0]; if (possiblyNextEntryName != null) { if (possiblyNextEntryName.startsWith(fileName) && possiblyNextEntryName.replace(fileName, "").startsWith("%23")) { String anchor = possiblyNextEntryName.replace(fileName, ""); anchor = convertAnchorToHtml(anchor); if (htmlBody.contains(anchor)) { if (currentAnchor == null) { // If current anchor is not found, first set that. currentAnchor = anchor; isCurrentNavPointMarked = false; } else { // If current anchor is already defined set the next anchor and break. nextAnchor = anchor; break; } } } else { // TODO: Next content is not the same file as the current one. Anchors are broken. Navigate to the next file. break; } } if (isCurrentNavPointMarked) { getToc().getNavMap().getNavPoints().get(tmpIndex).setMarkedToDelete(true); markedNavPoints++; } tmpIndex++; } if (markedNavPoints != 0) { if (markedNavPoints == getToc().getNavMap().getNavPoints().size() && markedNavPoints > 1) { throw new ReadingException("There are no items left in TOC. Toc.ncx file is probably malformed."); } for (Iterator<NavPoint> iterator = getToc().getNavMap().getNavPoints().iterator(); iterator.hasNext();) { NavPoint navPointToDelete = iterator.next(); if (navPointToDelete.isMarkedToDelete()) { iterator.remove(); if (--markedNavPoints == 0) { break; } } } } } } int[] bodyIntervals = getAnchorsInterval(htmlBody, currentAnchor, nextAnchor); trimStartPosition = bodyIntervals[0]; trimEndPosition = bodyIntervals[1]; } String extension = ContextHelper.getTextAfterCharacter(fileName, Constants.DOT); String mediaType = getMediaType(fileName); // If fileContentStr is too long; crop it by the maxContentPerSection. // Save the fileContent and position within a new navPoint, insert it after current index. if (Optionals.maxContentPerSection != 0) { // maxContentPerSection is given. 
int calculatedTrimEndPosition = calculateTrimEndPosition(entryName, htmlBody, trimStartPosition, trimEndPosition);
if (calculatedTrimEndPosition != -1) {
    trimEndPosition = calculatedTrimEndPosition;
    htmlBodyToReplace = htmlBody.substring(trimStartPosition, trimEndPosition);
    NavPoint nextEntryNavPoint = new NavPoint();
    nextEntryNavPoint.setTypeCode(2);
    nextEntryNavPoint.setEntryName(entryName);
    nextEntryNavPoint.setBodyTrimStartPosition(trimEndPosition);
    getToc().getNavMap().getNavPoints().add(index + 1, nextEntryNavPoint);
    // Inserting the calculated info to avoid calculating this navPoint again. In the future this data could be written to the Table of Contents file.
    getToc().getNavMap().getNavPoints().get(index).setTypeCode(2); // To indicate that this is a trimmed part. TODO: Replace these magic numbers with constants.
    getToc().getNavMap().getNavPoints().get(index).setEntryName(entryName);
    getToc().getNavMap().getNavPoints().get(index).setBodyTrimStartPosition(trimStartPosition);
    getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(trimEndPosition);
    if (lastBookSectionInfo == null) {
        lastBookSectionInfo = new BookSection();
    }
    lastBookSectionInfo.setExtension(extension);
    lastBookSectionInfo.setLabel(label);
    lastBookSectionInfo.setMediaType(mediaType);
} else {
    htmlBodyToReplace = getNonTrimmedHtmlBody(index, htmlBody, trimStartPosition, trimEndPosition, entryName);
}
} else {
    htmlBodyToReplace = getNonTrimmedHtmlBody(index, htmlBody, trimStartPosition, trimEndPosition, entryName);
}
bookSection.setExtension(extension);
bookSection.setLabel(label);
bookSection.setMediaType(mediaType);
if (Optionals.isIncludingTextContent) {
    bookSection.setSectionTextContent(getOnlyTextContent(entryName, htmlBody, trimStartPosition, trimEndPosition));
}
if (Optionals.cssStatus == CssStatus.OMIT) {
    markTableTags(entryName, htmlBody, htmlBodyToReplace, trimStartPosition, trimEndPosition);
}
htmlBodyToReplace = appendIncompleteTags(htmlBody, htmlBodyToReplace, entryName, index, trimStartPosition, trimEndPosition);
break;
}
}
if (!isSourceFileFound) {
    logger.log(Logger.Severity.warning, "Source file not found!");
    getToc().getNavMap().getNavPoints().remove(index);
    return getBookSection(index);
}
} else { // Calculated before.
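// The start/end positions were stored on this navPoint by an earlier pass, so the body can be sliced directly without re-scanning the anchors.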
fileContentStr = readFileContent(entryEntryName); htmlBody = getHtmlBody(fileContentStr); if (entryEndPosition != 0) { htmlBodyToReplace = htmlBody.substring(entryStartPosition, entryEndPosition); } else { htmlBodyToReplace = htmlBody.substring(entryStartPosition); } if (Optionals.isIncludingTextContent) { bookSection.setSectionTextContent(getOnlyTextContent(entryEntryName, htmlBody, entryStartPosition, entryEndPosition)); } if (Optionals.cssStatus == CssStatus.OMIT) { markTableTags(entryEntryName, htmlBody, htmlBodyToReplace, entryStartPosition, entryEndPosition); } htmlBodyToReplace = appendIncompleteTags(htmlBody, htmlBodyToReplace, entryEntryName, index, entryStartPosition, entryEndPosition); } htmlBodyToReplace = replaceImgTag(htmlBodyToReplace); fileContentStr = fileContentStr.replace(htmlBody, htmlBodyToReplace); if (Optionals.cssStatus == CssStatus.DISTRIBUTE) { fileContentStr = dissolveStyleTag(fileContentStr); } bookSection.setSectionContent(fileContentStr); return bookSection; } private BookSection prepareTrimmedBookSection(NavPoint entryNavPoint, int index) throws ReadingException, OutOfPagesException { BookSection bookSection = new BookSection(); String entryName = entryNavPoint.getEntryName(); int bodyTrimStartPosition = entryNavPoint.getBodyTrimStartPosition(); int bodyTrimEndPosition = entryNavPoint.getBodyTrimEndPosition(); // Will be calculated on the first attempt. // logger.log(Severity.info, "index: " + index + ", entryName: " + entryName + ", bodyTrimStartPosition: " + bodyTrimStartPosition + ", bodyTrimEndPosition: " // + bodyTrimEndPosition + ", entryOpenedTags: " + entryOpenedTags + ", entryClosingTags: " + entryClosingTags); String fileContent = readFileContent(entryName); String htmlBody = getHtmlBody(fileContent); String htmlBodyToReplace = null; if (bodyTrimEndPosition == 0) { // Not calculated before. String nextAnchor = getNextAnchor(index, entryName); if (nextAnchor != null) { // Next anchor is available in the same file. It may be the next stop for the content. String nextAnchorHtml = convertAnchorToHtml(nextAnchor); int anchorIndex = htmlBody.indexOf(nextAnchorHtml); if (anchorIndex != -1 && bodyTrimStartPosition <= anchorIndex) { while (htmlBody.charAt(anchorIndex) != Constants.TAG_OPENING) { // Getting just before anchor html. anchorIndex--; } bodyTrimEndPosition = anchorIndex; } else { // NextAnchor not found in the htmlContent. Invalidate it by removing it from navPoints and search for the next one. bodyTrimEndPosition = getNextAvailableAnchorIndex(index, entryName, bodyTrimStartPosition, htmlBody); } } int calculatedTrimEndPosition = calculateTrimEndPosition(entryName, htmlBody, bodyTrimStartPosition, bodyTrimEndPosition); if (calculatedTrimEndPosition != -1) { // Trimming again if needed. bodyTrimEndPosition = calculatedTrimEndPosition; htmlBodyToReplace = htmlBody.substring(bodyTrimStartPosition, bodyTrimEndPosition); NavPoint nextEntryNavPoint = new NavPoint(); nextEntryNavPoint.setTypeCode(2); nextEntryNavPoint.setEntryName(entryName); nextEntryNavPoint.setBodyTrimStartPosition(bodyTrimEndPosition); getToc().getNavMap().getNavPoints().add(index + 1, nextEntryNavPoint); getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(bodyTrimEndPosition); // Sets endPosition to avoid calculating again. } else { htmlBodyToReplace = getNonTrimmedHtmlBody(index, htmlBody, bodyTrimStartPosition, bodyTrimEndPosition, entryName); } } else { // Calculated before. 
htmlBodyToReplace = htmlBody.substring(bodyTrimStartPosition, bodyTrimEndPosition); // bodyTrimEndPosition may be zero? } if (Optionals.cssStatus == CssStatus.OMIT) { markTableTags(entryName, htmlBody, htmlBodyToReplace, bodyTrimStartPosition, bodyTrimEndPosition); } htmlBodyToReplace = appendIncompleteTags(htmlBody, htmlBodyToReplace, entryName, index, bodyTrimStartPosition, bodyTrimEndPosition); htmlBodyToReplace = replaceImgTag(htmlBodyToReplace); if (Optionals.isIncludingTextContent) { bookSection.setSectionTextContent(getOnlyTextContent(entryName, htmlBody, bodyTrimStartPosition, bodyTrimEndPosition)); } fileContent = fileContent.replace(htmlBody, htmlBodyToReplace); if (Optionals.cssStatus == CssStatus.DISTRIBUTE) { fileContent = dissolveStyleTag(fileContent); } bookSection.setSectionContent(fileContent); if (this.lastBookSectionInfo != null) { bookSection.setExtension(this.lastBookSectionInfo.getExtension()); bookSection.setLabel(this.lastBookSectionInfo.getLabel()); bookSection.setMediaType(this.lastBookSectionInfo.getMediaType()); } return bookSection; } private String getNonTrimmedHtmlBody(int index, String htmlBody, int trimStartPosition, int trimEndPosition, String entryName) { String htmlBodyToReplace = null; if (trimEndPosition == 0) { htmlBodyToReplace = htmlBody.substring(trimStartPosition); } else { htmlBodyToReplace = htmlBody.substring(trimStartPosition, trimEndPosition); } getToc().getNavMap().getNavPoints().get(index).setBodyTrimStartPosition(trimStartPosition); getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(trimEndPosition); getToc().getNavMap().getNavPoints().get(index).setEntryName(entryName); return htmlBodyToReplace; } /* * This method calculates and keeps every tag indices of the given entry file. Later on, these calculations will be used when trimming the entry. * * e.g. If the open-close tag indices are in the same trimmed part; tag will be closed there and won't disturb the next trimmed part. * * If the open-close tag indices are not in the same trimmed part; tag will be closed at the end of the current trimmed part, and opened in the next trimmed part. */ private void calculateEntryTagPositions(String entryName, String htmlBody) { List<Tag> openedTags = null; ListIterator<Tag> listIterator = null; boolean isPossiblyTagOpened = false; StringBuilder possiblyTag = new StringBuilder(); Pattern pattern = Pattern.compile(Constants.HTML_TAG_PATTERN); Matcher matcher; for (int i = 0; i < htmlBody.length(); i++) { if (htmlBody.charAt(i) == Constants.TAG_OPENING) { // Tag might have been opened. isPossiblyTagOpened = true; possiblyTag.setLength(0); // In case of double occurence of '<' start from the next found tag opening; e.g. '< <p>'. } else if (htmlBody.charAt(i) == Constants.TAG_CLOSING) { // Tag might have been closed. possiblyTag.append(Constants.TAG_CLOSING); if (htmlBody.charAt(i - 1) != '/') { // Not an empty tag. String tagStr = possiblyTag.toString(); matcher = pattern.matcher(tagStr); if (matcher.matches()) { if (tagStr.charAt(1) == '/') { // Closing tag. Match it with the last open tag with the same name. String tagName = getFullTagName(tagStr, false); listIterator = openedTags.listIterator(openedTags.size()); while (listIterator.hasPrevious()) { Tag openedTag = listIterator.previous(); if (openedTag.getTagName().equals(tagName)) { // Found the last open tag with the same name. 
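// e.g. for <div><p>text</p></div>, this </p> pairs with the most recently opened <p>; walking the opened-tag list backwards gives stack-like (LIFO) matching.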
addEntryTagPosition(entryName, openedTag.getFullTagName(), openedTag.getOpeningTagStartPosition(), i - tagName.length() - 1); listIterator.remove(); break; } } } else { // Opening tag. if (openedTags == null) { openedTags = new ArrayList<>(); } String fullTagName = getFullTagName(tagStr, true); String tagName = getTagName(fullTagName); Tag tag = new Tag(); tag.setTagName(tagName); tag.setFullTagName(fullTagName); tag.setOpeningTagStartPosition(i - fullTagName.length()); openedTags.add(tag); } } } else { // Empty tag. String tagStr = possiblyTag.toString(); matcher = pattern.matcher(tagStr); if (matcher.matches()) { int closingBracletIndex = tagStr.indexOf(Constants.TAG_CLOSING); String tagName = tagStr.substring(1, closingBracletIndex - 1); addEntryTagPosition(entryName, tagName, i - tagName.length() - 1, i - tagName.length() - 1); } } possiblyTag.setLength(0); isPossiblyTagOpened = false; } if (isPossiblyTagOpened) { possiblyTag.append(htmlBody.charAt(i)); } } } private void addEntryTagPosition(String entryName, String fullTagName, int openingPosition, int closingPosition) { Tag tag = new Tag(); tag.setOpeningTagStartPosition(openingPosition); tag.setClosingTagStartPosition(closingPosition); tag.setFullTagName(fullTagName); tag.setTagName(getTagName(fullTagName)); if (this.entryTagPositions.containsKey(entryName)) { List<Tag> tagList = this.entryTagPositions.get(entryName); int index = tagList.size(); while (index > 0 && tagList.get(index - 1).getOpeningTagStartPosition() > openingPosition) { index--; } this.entryTagPositions.get(entryName).add(index, tag); } else { List<Tag> tagList = new ArrayList<>(); tagList.add(tag); this.entryTagPositions.put(entryName, tagList); } } private String getFullTagName(String tag, boolean isOpeningTag) { int closingBracletIndex = tag.indexOf(Constants.TAG_CLOSING); if (isOpeningTag) { return tag.substring(1, closingBracletIndex); } else { return tag.substring(2, closingBracletIndex); } } private String getTagName(String fullTagName) { if (fullTagName.contains(" ")) { fullTagName = fullTagName.trim(); int endIndex = 1; while (fullTagName.length() > endIndex && fullTagName.charAt(endIndex) != ' ') { endIndex++; } return fullTagName.substring(0, endIndex); } else { return fullTagName; } } // TODO: Similar functionality happens in the prepareBookSection method. Merge them into this. private int getNextAvailableAnchorIndex(int index, String entryName, int bodyTrimStartPosition, String htmlBody) throws ReadingException, OutOfPagesException { getToc().getNavMap().getNavPoints().remove(++index); // Removing the nextAnchor from navPoints; 'cause it's already not found. int markedNavPoints = 0; int anchorIndex = -1; boolean isNextAnchorFound = false; // Next available anchor should be the next starting point. while (index < getToc().getNavMap().getNavPoints().size()) { // Looping until next anchor is found. 
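// Any navPoint whose anchor can't be located in this file is marked here and removed below, so a malformed TOC degrades gracefully instead of failing the whole book.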
NavPoint possiblyNextNavPoint = getNavPoint(index); String[] possiblyNextEntryNameLabel = findEntryNameAndLabel(possiblyNextNavPoint); String possiblyNextEntryName = possiblyNextEntryNameLabel[0]; if (possiblyNextEntryName != null) { String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (possiblyNextEntryName.startsWith(fileName) && possiblyNextEntryName.replace(fileName, "").startsWith("%23")) { String anchor = possiblyNextEntryName.replace(fileName, ""); String anchorHtml = convertAnchorToHtml(anchor); anchorIndex = htmlBody.indexOf(anchorHtml); if (anchorIndex != -1) { while (htmlBody.charAt(anchorIndex) != Constants.TAG_OPENING) { // Getting just before anchor html. anchorIndex--; } if (bodyTrimStartPosition <= anchorIndex) { // getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(anchorIndex); // Sets endPosition to avoid calculating again. isNextAnchorFound = true; break; } } } else { // TODO: Next content is not the same file as the current one. Anchors are broken. Navigate to the next file. break; } } getToc().getNavMap().getNavPoints().get(index).setMarkedToDelete(true); markedNavPoints++; index++; } if (markedNavPoints != 0) { for (Iterator<NavPoint> iterator = getToc().getNavMap().getNavPoints().iterator(); iterator.hasNext();) { NavPoint navPointToDelete = iterator.next(); if (navPointToDelete.isMarkedToDelete()) { iterator.remove(); if (--markedNavPoints == 0) { break; } } } } if (isNextAnchorFound) { return anchorIndex; } else { return 0; } } private String prepareOpeningTags(List<Tag> openedTags) { StringBuilder openingTagsBuilder = new StringBuilder(); for (ListIterator<Tag> iterator = openedTags.listIterator(); iterator.hasNext();) { openingTagsBuilder.append(Constants.TAG_OPENING).append(iterator.next().getFullTagName()).append(Constants.TAG_CLOSING); } return openingTagsBuilder.toString(); } private String prepareClosingTags(List<Tag> openedTags) { StringBuilder closingTagsBuilder = new StringBuilder(); for (ListIterator<Tag> iterator = openedTags.listIterator(openedTags.size()); iterator.hasPrevious();) { closingTagsBuilder.append(Constants.TAG_START).append(iterator.previous().getTagName()).append(Constants.TAG_CLOSING); } return closingTagsBuilder.toString(); } private int calculateTrimEndPosition(String entryName, String htmlBody, int trimStartPosition, int trimEndPos) { int trimEndPosition = (trimEndPos != 0 && (trimEndPos - trimStartPosition) < Optionals.maxContentPerSection) ? trimEndPos : trimStartPosition + Optionals.maxContentPerSection; int htmlBodyLength = htmlBody.length(); // Don't need to trim. HtmlBody with tags are already below limit. if (htmlBodyLength < trimEndPosition || (trimEndPosition - trimStartPosition) < Optionals.maxContentPerSection) { return -1; } List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); int loopCount = 0; int lastTagsLength = 0; while (true) { int tagsLength = 0; for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Empty Tag. tagsLength += tag.getFullTagName().length() + 3; // < /> } } else { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag. 
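// maxContentPerSection budgets visible text, so markup shouldn't count against it; the window is widened by the length of every tag inside it and re-measured until the text-only length fits.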
tagsLength += tag.getFullTagName().length() + 2; // < > } if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag. tagsLength += tag.getTagName().length() + 3; // < /> } } } if (lastTagsLength == tagsLength) { // Tags length isn't greater than the last one. No need to keep going. if (loopCount == 0) { // Returned on the first try. Don't need to trim. HtmlBody without tags are already below limit. if (tagsLength == 0 && htmlBodyLength > trimEndPosition) { // If there are no tags in the trimmed part. break; } return -1; } else { break; } } trimEndPosition += tagsLength; // If trimEndPosition is over the htmlBody's index; then htmlBody is already within limits. No need to trim. if (trimEndPosition >= htmlBodyLength) { return -1; } if (((trimEndPosition - trimStartPosition) - tagsLength) >= Optionals.maxContentPerSection) { break; } lastTagsLength = tagsLength; loopCount++; } // TODO: Regex to find table tags like: <table(*.?)>[</table>|</>] // TODO: This may break the maxContentPerSection rule. Check if the table content will exceed the limit. int tableStartIndex = htmlBody.indexOf(Constants.TAG_TABLE_START, trimStartPosition); // If interval has table, don't break the table. if (tableStartIndex != -1 && tableStartIndex < trimEndPosition) { int tableEndIndex = htmlBody.indexOf(Constants.TAG_TABLE_END, tableStartIndex); if (tableEndIndex != -1) { trimEndPosition = tableEndIndex + Constants.TAG_TABLE_END.length(); } else { trimEndPosition = findEligibleEndPosition(tagStartEndPositions, htmlBody, trimEndPosition); } } else { trimEndPosition = findEligibleEndPosition(tagStartEndPositions, htmlBody, trimEndPosition); } return trimEndPosition; } // Checks if we are in an html tag. If so, move forward or backward until the tag is over. Else, move backwards until we hit the blank. private int findEligibleEndPosition(List<Tag> tagStartEndPositions, String htmlBody, int trimEndPosition) { boolean isMovedToEndOfTag = false; for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty tag. // Inside an empty tag. if (tag.getOpeningTagStartPosition() < trimEndPosition && (tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2) > trimEndPosition) { while (htmlBody.charAt(trimEndPosition) != Constants.TAG_CLOSING) { trimEndPosition++; } trimEndPosition++; isMovedToEndOfTag = true; break; } } else { // Inside an opening tag. if (tag.getOpeningTagStartPosition() < trimEndPosition && (tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1) > trimEndPosition) { while (htmlBody.charAt(trimEndPosition) != Constants.TAG_OPENING) { trimEndPosition--; } // trimEndPosition--; isMovedToEndOfTag = true; break; } // Inside a closing tag. if (tag.getClosingTagStartPosition() < trimEndPosition && (tag.getClosingTagStartPosition() + tag.getTagName().length() + 2) > trimEndPosition) { while (htmlBody.charAt(trimEndPosition) != Constants.TAG_CLOSING) { trimEndPosition++; } trimEndPosition++; isMovedToEndOfTag = true; break; } } } if (!isMovedToEndOfTag) { // To avoid dividing the words in half. while (htmlBody.charAt(trimEndPosition) != ' ') { trimEndPosition--; // We may have hit a tag. 
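// e.g. trimming '...lorem ipsum</p>' in the middle of 'ipsum' backs up to the preceding space; landing on '>' or '<' instead means a tag boundary was reached first.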
if (htmlBody.charAt(trimEndPosition) == Constants.TAG_CLOSING) {
    trimEndPosition++;
    break;
} else if (htmlBody.charAt(trimEndPosition) == Constants.TAG_OPENING) {
    break;
}
}
}
return trimEndPosition;
}

private String getNextAnchor(int index, String entryName) throws ReadingException, OutOfPagesException {
    if (getToc().getNavMap().getNavPoints().size() > (index + 1)) {
        NavPoint nextNavPoint = getNavPoint(index + 1);
        if (nextNavPoint.getTypeCode() != 2) { // Real navPoint. Only real navPoints are anchored. TODO: Replace these magic numbers with constants.
            String[] nextEntryLabel = findEntryNameAndLabel(nextNavPoint);
            String nextHref = nextEntryLabel[0];
            if (nextHref != null) {
                String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH));
                if (nextHref.startsWith(fileName) && nextHref.replace(fileName, "").startsWith("%23")) { // Both anchors are in the same file.
                    return nextHref.replace(fileName, "");
                }
            }
        }
    }
    return null;
}

private boolean isFileReadFirstTime(int index, String entryName) throws ReadingException, OutOfPagesException {
    if ((index - 1) >= 0) {
        NavPoint prevNavPoint = getNavPoint(index - 1);
        if (prevNavPoint.getTypeCode() == 2) {
            return false;
        }
        String prevHref = findEntryNameAndLabel(prevNavPoint)[0];
        if (prevHref != null) {
            String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH));
            if (prevHref.startsWith(fileName)) { // Same content as the previous navPoint, so this file is not being read for the first time. (&& prevHref.replace(fileName, "").startsWith("%23"))
                return false;
            }
        }
    }
    return true;
}

private String[] findEntryNameAndLabel(NavPoint navPoint) throws ReadingException {
    if (navPoint.getContentSrc() != null) {
        return new String[] { navPoint.getContentSrc(), navPoint.getNavLabel() };
    }
    throw new ReadingException("NavPoint content is not found in epub content.");
}

// TODO: This operation is getting more and more expensive. fileContent could be cached keyed by entryName (even a single-entry map <entryName, fileContent> would help);
// this method would then only be hit when the cache doesn't contain that entryName.
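// A minimal sketch of the caching idea from the TODO above (hypothetical, not wired in):
// private String cachedEntryName;
// private String cachedFileContent;
// ... at the top of readFileContent: if (entryName.equals(cachedEntryName)) { return cachedFileContent; }
// ... before returning: cachedEntryName = entryName; cachedFileContent = fileContentStr;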
private String readFileContent(String entryName) throws ReadingException {
    ZipFile epubFile = null;
    try {
        epubFile = new ZipFile(zipFilePath);
        ZipEntry zipEntry = epubFile.getEntry(entryName);
        InputStream inputStream = epubFile.getInputStream(zipEntry);
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
        StringBuilder fileContent = new StringBuilder();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                fileContent.append(line).append(" ");
            }
        } finally {
            bufferedReader.close();
        }
        String fileContentStr = fileContent.toString();
        if (Optionals.cssStatus != CssStatus.OMIT) {
            fileContentStr = replaceCssLinkWithActualCss(epubFile, fileContentStr);
        }
        return fileContentStr;
    } catch (IOException e) {
        e.printStackTrace();
        throw new ReadingException("IOException while reading content " + entryName + ": " + e.getMessage());
    } catch (ParserConfigurationException e) {
        e.printStackTrace();
        throw new ReadingException("ParserConfigurationException while reading content " + entryName + ": " + e.getMessage());
    } catch (SAXException e) {
        e.printStackTrace();
        throw new ReadingException("SAXException while reading content " + entryName + ": " + e.getMessage());
    } catch (TransformerException e) {
        e.printStackTrace();
        throw new ReadingException("TransformerException while reading content " + entryName + ": " + e.getMessage());
    } finally {
        try {
            if (epubFile != null) {
                epubFile.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
            throw new ReadingException("Error closing ZipFile: " + e.getMessage());
        }
    }
}

private String getHtmlBody(String htmlContent) throws ReadingException {
    int startOfBody = htmlContent.lastIndexOf(Constants.TAG_BODY_START);
    int endOfBody = htmlContent.lastIndexOf(Constants.TAG_BODY_END);
    if (startOfBody != -1 && endOfBody != -1) {
        // Guard first: scanning for the closing '>' before this check could throw StringIndexOutOfBoundsException when no body tag exists.
        int bodyStartEndIndex = startOfBody + Constants.TAG_BODY_START.length();
        while (htmlContent.charAt(bodyStartEndIndex) != Constants.TAG_CLOSING) { // Skip over any attributes of the body tag.
            bodyStartEndIndex++;
        }
        return htmlContent.substring(bodyStartEndIndex + 1, endOfBody);
    } else {
        throw new ReadingException("Exception while getting book section : Html body tags not found.");
    }
}

// Starts from the current anchor and reads until the next anchor starts.
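// e.g. currentAnchor id="c1" and nextAnchor id="c2": the returned interval runs from the '<' of the tag carrying c1 up to the '<' of the tag carrying c2; a missing anchor falls back to position 0.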
private int[] getAnchorsInterval(String htmlBody, String currentAnchor, String nextAnchor) throws ReadingException { int startOfCurrentAnchor = -1; int startOfNextAnchor = -1; if (currentAnchor != null && !currentAnchor.equals("")) { startOfCurrentAnchor = htmlBody.indexOf(currentAnchor); } if (nextAnchor != null && !nextAnchor.equals("")) { startOfNextAnchor = htmlBody.indexOf(nextAnchor); } if (startOfCurrentAnchor != -1) { while (htmlBody.charAt(startOfCurrentAnchor) != Constants.TAG_OPENING) { startOfCurrentAnchor--; } } else { startOfCurrentAnchor = 0; } if (startOfNextAnchor != -1) { while (htmlBody.charAt(startOfNextAnchor) != Constants.TAG_OPENING) { startOfNextAnchor--; } } else { startOfNextAnchor = 0; } return new int[] { startOfCurrentAnchor, startOfNextAnchor }; // throw new ReadingException("Exception while trimming anchored parts : Defined Anchors not found."); } private String convertAnchorToHtml(String anchor) throws ReadingException { // #Page_1 to id="Page_1" converter if (anchor == null) { return null; } if (anchor.startsWith("#")) { // Anchors should start with # return "id=\"" + anchor.substring(1) + "\""; } else if (anchor.startsWith("%23")) { // Or UTF-8 equivalent of # return "id=\"" + anchor.substring(3) + "\""; } else { throw new ReadingException("Anchor does not start with #"); } } private String getMediaType(String fileName) { List<XmlItem> manifestItems = getPackage().getManifest().getXmlItemList(); for (int i = 0; i < manifestItems.size(); i++) { if (manifestItems.get(i).getAttributes().containsValue(fileName)) { if (manifestItems.get(i).getAttributes().containsKey("media-type")) { return manifestItems.get(i).getAttributes().get("media-type"); } } } return null; } // Distributing the css parts in the style tag to the belonging html tags. private String dissolveStyleTag(String trimmedFileContent) throws ReadingException { Pattern cssPattern = Pattern.compile("<style(.*?)>(.*?)</style>"); Matcher matcher = cssPattern.matcher(trimmedFileContent); while (matcher.find()) { // There may be multiple style tags. String styleTagStr = matcher.group(2); Map<String, String> cssMap = getCssMap(styleTagStr); String htmlBody = getHtmlBody(trimmedFileContent); String htmlBodyToReplace = putCssIntoTags(cssMap, htmlBody); trimmedFileContent = trimmedFileContent.replace(htmlBody, htmlBodyToReplace); trimmedFileContent = trimmedFileContent.replace("<style" + styleTagStr + "</style>", ""); } return trimmedFileContent; } private Map<String, String> getCssMap(String cssfileContent) { Map<String, String> cssMap = new HashMap<>(); Pattern cssPattern = Pattern.compile("\\{(.*?)\\}"); Matcher matcher = cssPattern.matcher(cssfileContent); while (matcher.find()) { String cssValue = matcher.group(1); int indexOfCurlyStart = matcher.start(); int indexOfCssNameStart = indexOfCurlyStart - 1; StringBuilder cssNameBuilder = new StringBuilder(); String cssName = null; while (indexOfCssNameStart >= 0) { // TODO: There may be multiple css names pointing to one cssValue e.g. 
// .legalnotice p { text-align: left; } OR .legalnotice, p { text-align: left; }
if (cssfileContent.charAt(indexOfCssNameStart) == '}' || cssfileContent.charAt(indexOfCssNameStart) == '/') {
    String builtCssName = cssNameBuilder.toString().trim();
    if (builtCssName.length() > 0) {
        cssName = cssNameBuilder.reverse().toString().trim();
        break;
    }
}
cssNameBuilder.append(cssfileContent.charAt(indexOfCssNameStart));
indexOfCssNameStart--;
}
if (cssName == null) { // The scan reached the start of the content (e.g. the very first rule in the file); use what was collected, or skip the rule.
    String builtCssName = cssNameBuilder.reverse().toString().trim();
    if (builtCssName.length() > 0) {
        cssName = builtCssName;
    } else {
        continue;
    }
}
List<String> cssNameList = null;
// Separate them here by ' ', ',', '>' (known separators).
String separator = null;
if (cssName.contains(",")) {
    separator = ",";
} else if (cssName.contains(">")) {
    separator = ">";
} else if (cssName.contains(" ")) {
    separator = " ";
}
if (separator != null) {
    cssNameList = Arrays.asList(cssName.split(separator));
}
if (cssNameList == null) { // Has one css name.
    if (cssMap.containsKey(cssName)) {
        cssMap.put(cssName, cssMap.get(cssName) + " " + cssValue);
    } else {
        cssMap.put(cssName, cssValue);
    }
} else { // Has multiple css names.
    for (String cssNameItem : cssNameList) {
        if (cssMap.containsKey(cssNameItem)) {
            cssMap.put(cssNameItem, cssMap.get(cssNameItem) + " " + cssValue);
        } else {
            cssMap.put(cssNameItem, cssValue);
        }
    }
}
}
return cssMap;
}

// TODO: Search htmlBody tags by cssName and put cssValues where they are found.
// e.g. div.mert, "margin-left:30px; padding-top:25px"
// <div class="mert"> -> <div style="margin-left:30px; padding-top:25px">
private String putCssIntoTags(Map<String, String> cssMap, String trimmedHtmlBody) {
    for (Map.Entry<String, String> cssEntry : cssMap.entrySet()) {
        String tagName = cssEntry.getKey();
        String className = null;
        int classNameLength = 0;
        int dotIndex = cssEntry.getKey().indexOf(".");
        if (dotIndex > 0) { // e.g. div.mert
            className = cssEntry.getKey().substring(dotIndex + 1);
            classNameLength = className.length();
            tagName = cssEntry.getKey().substring(0, dotIndex);
        }
        int startTagIndex = trimmedHtmlBody.indexOf("<" + tagName);
        while (startTagIndex != -1) {
            int endTagIndex = startTagIndex;
            while (trimmedHtmlBody.charAt(endTagIndex) != '>') {
                endTagIndex++;
            }
            endTagIndex++;
            // Not an empty (self-closing) tag and big enough for a class attribute. endTagIndex - 2 is the char just before '>'.
            if (trimmedHtmlBody.charAt(endTagIndex - 2) != '/' && (endTagIndex - startTagIndex) > (5 + classNameLength)) {
                String tag = trimmedHtmlBody.substring(startTagIndex, endTagIndex);
                if (className == null || tag.contains(className)) {
                    // Remove redundant class.
                    if (className != null) {
                        int classEndIndex = tag.indexOf(className);
                        int classStartIndex = classEndIndex - 1;
                        while (tag.charAt(classStartIndex) != 'c') {
                            classStartIndex--;
                        }
                        tag = tag.substring(0, classStartIndex) + tag.substring(classEndIndex + classNameLength + 1, tag.length());
                    }
                    int styleIndex = tag.indexOf("style=\"");
                    String tagToReplace = null;
                    if (styleIndex != -1) { // Already has a style attribute. Merge the value into it.
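// e.g. <p style="color:red"> merged with "font-size:12px" should yield <p style="font-size:12px;color:red">.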
                        tagToReplace = tag.substring(0, styleIndex + 7) + cssEntry.getValue() + ";" + tag.substring(styleIndex + 7); // Insert just after the opening quote of style=" (7 chars), not before it.
                    } else {
                        int insertStyleIndex = 1 + tagName.length() + 1; // '<' and ' '
                        tagToReplace = tag.substring(0, insertStyleIndex) + "style=\"" + cssEntry.getValue() + "\" " + tag.substring(insertStyleIndex, tag.length());
                    }
                    trimmedHtmlBody = trimmedHtmlBody.replaceFirst(Pattern.quote(tag), Matcher.quoteReplacement(tagToReplace)); // Quote both sides; tags often contain regex metacharacters.
                }
            }
            startTagIndex = trimmedHtmlBody.indexOf("<" + tagName, startTagIndex + 1);
        }
    }
    return trimmedHtmlBody;
}

private String replaceCssLinkWithActualCss(ZipFile epubFile, String htmlContent) throws IOException, ParserConfigurationException, ReadingException, SAXException, TransformerException {
    // <link rel="stylesheet" type="text/css" href="docbook-epub.css"/>
    Pattern linkTagPattern = Pattern.compile("<link.*?/>|<link.*?</link>");
    Pattern hrefPattern = Pattern.compile("href=\"(.*?)\"");
    Matcher linkMatcher = linkTagPattern.matcher(htmlContent);
    while (linkMatcher.find()) {
        String linkTag = linkMatcher.group(0);
        Matcher hrefMatcher = hrefPattern.matcher(linkTag);
        if (hrefMatcher.find()) {
            String cssHref = ContextHelper.getTextAfterCharacter(hrefMatcher.group(1), Constants.SLASH);
            if (cssHref.endsWith(".css")) { // Should we check for its media type (text/css) as well?
                if (nonExistingHrefList != null && nonExistingHrefList.contains(cssHref)) {
                    // logger.log(Logger.Severity.warning, "Already not found on the first try. Skipping the search for(Css) : " + cssHref);
                    htmlContent = htmlContent.replace(linkTag, "");
                } else {
                    boolean isCssFileFound = false;
                    for (int i = 0; i < getEntryNames().size(); i++) {
                        String entryName = getEntryNames().get(i);
                        String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH));
                        if (cssHref.equals(fileName)) { // css exists.
                            isCssFileFound = true;
                            ZipEntry zipEntry = epubFile.getEntry(entryName);
                            InputStream zipEntryInputStream = epubFile.getInputStream(zipEntry);
                            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(zipEntryInputStream, "UTF-8")); // Read css as UTF-8, consistent with readFileContent.
                            StringBuilder fileContent = new StringBuilder();
                            fileContent.append("<style type=\"text/css\">");
                            try {
                                String line;
                                while ((line = bufferedReader.readLine()) != null) {
                                    fileContent.append(line);
                                }
                            } finally {
                                bufferedReader.close();
                            }
                            fileContent.append("</style>");
                            htmlContent = htmlContent.replace(linkTag, fileContent.toString());
                            break;
                        }
                    }
                    if (!isCssFileFound) {
                        logger.log(Logger.Severity.warning, "Referenced css file not found!");
                        if (nonExistingHrefList == null) {
                            nonExistingHrefList = new ArrayList<>();
                        }
                        nonExistingHrefList.add(cssHref);
                        htmlContent = htmlContent.replace(cssHref, "");
                    }
                }
            }
        }
    }
    return htmlContent;
}

private String replaceImgTag(String htmlBody) throws ReadingException {
    Pattern imgTagPattern = Pattern.compile("<img.*?/>|<img.*?</img>");
    Pattern srcPattern = Pattern.compile("src=\"(.*?)\"");
    Matcher imgTagMatcher = imgTagPattern.matcher(htmlBody);
    while (imgTagMatcher.find()) {
        String imgPart = imgTagMatcher.group(0);
        Matcher srcMatcher = srcPattern.matcher(imgPart);
        if (srcMatcher.find()) {
            String srcHref = ContextHelper.getTextAfterCharacter(srcMatcher.group(1), Constants.SLASH);
            String encodedSrcHref = ContextHelper.encodeToUtf8(srcHref);
            if (nonExistingHrefList != null && nonExistingHrefList.contains(srcHref)) {
                // logger.log(Logger.Severity.warning, "Already not found on the first try.
Skipping the search for(Img) : " + srcMatcher); htmlBody = htmlBody.replace(imgPart, ""); } else { boolean isImageFileFound = false; for (int i = 0; i < getEntryNames().size(); i++) { String entryName = getEntryNames().get(i); String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (encodedSrcHref.equals(fileName)) { // image exists. isImageFileFound = true; ZipFile epubFile = null; try { String extension = ContextHelper.getTextAfterCharacter(fileName, Constants.DOT); epubFile = new ZipFile(this.zipFilePath); ZipEntry zipEntry = epubFile.getEntry(entryName); InputStream zipEntryInputStream = epubFile.getInputStream(zipEntry); // Convert inputStream to Base64Binary. byte[] imageAsBytes = ContextHelper.convertIsToByteArray(zipEntryInputStream); byte[] imageAsBase64 = Base64.encodeBase64(imageAsBytes); String imageContent = new String(imageAsBase64); String src = "data:image/" + extension + ";base64," + imageContent; htmlBody = htmlBody.replace(srcHref, src); break; } catch (IOException e) { e.printStackTrace(); } finally { if (epubFile != null) { try { epubFile.close(); } catch (IOException e) { e.printStackTrace(); } } } } } if (!isImageFileFound) { logger.log(Logger.Severity.warning, "Referenced image file not found: " + srcHref); if (nonExistingHrefList == null) { nonExistingHrefList = new ArrayList<>(); } nonExistingHrefList.add(srcHref); htmlBody = htmlBody.replace(imgPart, ""); } } } } return htmlBody; } // Warning: May devour anchors. private void markTableTags(String entryName, String htmlBody, String htmlBodyToReplace, int trimStartPosition, int trimEndPosition) { Pattern tableTagPattern = Pattern.compile("<table.*?>", Pattern.DOTALL); Matcher tableTagMatcher = tableTagPattern.matcher(htmlBodyToReplace); if (tableTagMatcher.find()) { if (entryTagPositions == null || !entryTagPositions.containsKey(entryName)) { if (entryTagPositions == null) { entryTagPositions = new HashMap<>(); } calculateEntryTagPositions(entryName, htmlBody); } List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); List<Tag> tableTagList = new ArrayList<>(); for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getTagName().equals("table")) { if (tag.getOpeningTagStartPosition() != tag.getClosingTagStartPosition()) { // Not an empty table tag. if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag is within scope. tableTagList.add(tag); // if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag is also withing scope. // // } } } } } // Remove nested tables. 
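// e.g. a <table> nested inside another table's cell: only the outermost table should be treated as one unit, so the inner (smaller) intervals are filtered out below.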
List<Tag> smallerTableTagList = new ArrayList<>(); for (int i = 0; i < tableTagList.size(); i++) { int tag1StartPosition = tableTagList.get(i).getOpeningTagStartPosition(); int tag1EndPosition = tableTagList.get(i).getClosingTagStartPosition(); for (int j = i + 1; j < tableTagList.size(); j++) { int tag2StartPosition = tableTagList.get(j).getOpeningTagStartPosition(); int tag2EndPosition = tableTagList.get(j).getClosingTagStartPosition(); if (tag1StartPosition > tag2StartPosition && tag1EndPosition < tag2EndPosition) { smallerTableTagList.add(tableTagList.get(i)); } else if (tag2StartPosition > tag1StartPosition && tag2EndPosition < tag1EndPosition) { smallerTableTagList.add(tableTagList.get(j)); } } } tableTagList.removeAll(smallerTableTagList); markTableTags(entryName, htmlBody, trimStartPosition, trimEndPosition, tableTagList); } } private void markTableTags(String entryName, String htmlBody, int trimStartPosition, int trimEndPosition, List<Tag> tableTagPositions) { List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); for (int i = 0; i < tableTagPositions.size(); i++) { int tableStartPosition = tableTagPositions.get(i).getOpeningTagStartPosition(); int tableEndPosition = tableTagPositions.get(i).getClosingTagStartPosition(); for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > tableEndPosition) { break; } // Exclude img tags to save images in table tag. // if(tagInfo.getTagName().equals("img")) { // continue; // } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty Tag if (tag.getOpeningTagStartPosition() > tableStartPosition && tag.getOpeningTagStartPosition() < tableEndPosition) { tag.setOmitted(true); } } else { if (tag.getOpeningTagStartPosition() > tableStartPosition && tag.getOpeningTagStartPosition() < tableEndPosition) { // Opening tag. tag.setOmitted(true); } if (tag.getClosingTagStartPosition() > tableStartPosition && tag.getClosingTagStartPosition() < tableEndPosition) { // Closing tag. tag.setOmitted(true); } } } } } // Removes all the tags from htmlBody and returns it. private String getOnlyTextContent(String entryName, String htmlBody, int trimStartPosition, int trimEndPosition) { List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); List<String> stringsToRemove = new ArrayList<>(); if (trimEndPosition == 0) { trimEndPosition = htmlBody.length(); } for (Tag tag : tagStartEndPositions) { // This may not work correctly. 
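// Marker technique: tag substrings are overwritten in place with STRING_MARKER delimiters (keeping the overall length unchanged so the recorded tag positions stay valid), then the marked substrings are stripped at the end.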
if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty Tag if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { htmlBody = htmlBody.substring(0, tag.getOpeningTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2, htmlBody.length()); stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); } } else { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag. htmlBody = htmlBody.substring(0, tag.getOpeningTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1, htmlBody.length()); stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); } if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag. htmlBody = htmlBody.substring(0, tag.getClosingTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getClosingTagStartPosition() + tag.getTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() + tag.getTagName().length() + 2, htmlBody.length()); stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getClosingTagStartPosition() + tag.getTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); } } } htmlBody = htmlBody.substring(trimStartPosition, trimEndPosition); for (String stringToRemove : stringsToRemove) { htmlBody = htmlBody.replace(stringToRemove, ""); } return htmlBody; } // TODO: Save these in navPoints as well avoid calculating again. private String appendIncompleteTags(String htmlBody, String htmlBodyToReplace, String entryName, int index, int trimStartPosition, int trimEndPosition) throws ReadingException { List<Tag> prevOpenedNotClosedYetTags = new ArrayList<>(); // Previously opened in this scope and not yet closed tags in scope. Appending opening and closing tags. List<Tag> openedNotClosedYetTags = new ArrayList<>(); // Opened in this scope and not yet closed tags in scope. Appending only closing tags. List<Tag> prevOpenedClosedTags = new ArrayList<>(); // Previously opened and closed in this scope. Appending only opening tags. 
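// e.g. a <div> opened in the previous section and still open here is re-opened at the top of this section and closed at its end, so every section stays well-formed on its own.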
List<Tag> currentEntryTags = this.entryTagPositions.get(entryName); trimEndPosition = trimEndPosition == 0 ? htmlBody.length() : trimEndPosition; for (int i = 0; i < currentEntryTags.size(); i++) { Tag tag = currentEntryTags.get(i); // TODO: break this when it's out of possibility. // Opened in the trimmed part, closed after the trimmed part. if (!tag.isOmitted() && tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition && tag.getClosingTagStartPosition() > trimEndPosition) { openedNotClosedYetTags.add(tag); } } List<Tag> prevOpenedTags = getToc().getNavMap().getNavPoints().get(index).getOpenTags(); if (prevOpenedTags != null) { for (Tag prevOpenedTag : prevOpenedTags) { // If the tag ends before text starts, tag should open and then close, // If tag does not end before text starts, tag should be placed in the beginning. if (prevOpenedTag.getClosingTagStartPosition() > trimEndPosition) { // Previously opened and not yet closed in scope tags. Should have a place in the beginning. prevOpenedNotClosedYetTags.add(prevOpenedTag); } else { // Previously opened but closed in scope tags. // TODO: Find these tags a position :( Or just append them from the beginning. I don't think it would break anything, would it? prevOpenedClosedTags.add(prevOpenedTag); } } } Pair<String, List<String>> htmlBodyMarkingsPair = null; if (Optionals.cssStatus == CssStatus.OMIT) { // Tag omitting only happens in replaceTableTag function when css status is given Omit. htmlBodyMarkingsPair = markOmittedTags(currentEntryTags, htmlBody, trimStartPosition, trimEndPosition); if (htmlBodyMarkingsPair != null) { htmlBody = htmlBodyMarkingsPair.getFirst(); } } // TODO: We shouldn't substring htmlBody before this method. if (trimEndPosition == htmlBody.length()) { htmlBodyToReplace = htmlBody.substring(trimStartPosition); } else { htmlBodyToReplace = htmlBody.substring(trimStartPosition, trimEndPosition); } if (htmlBodyMarkingsPair != null) { List<String> stringsToRemove = htmlBodyMarkingsPair.getSecond(); if (stringsToRemove != null) { for (String stringToRemove : stringsToRemove) { htmlBodyToReplace = htmlBodyToReplace.replace(stringToRemove, ""); } } } String openingTags = ""; String closingTags = ""; if (!openedNotClosedYetTags.isEmpty()) { closingTags += prepareClosingTags(openedNotClosedYetTags); } if (!prevOpenedNotClosedYetTags.isEmpty()) { openingTags += prepareOpeningTags(prevOpenedNotClosedYetTags); closingTags += prepareClosingTags(prevOpenedNotClosedYetTags); } if (!prevOpenedClosedTags.isEmpty()) { openingTags += prepareOpeningTags(prevOpenedClosedTags); } if (!openingTags.isEmpty() || !closingTags.isEmpty()) { htmlBodyToReplace = openingTags + htmlBodyToReplace + closingTags; } if (getToc().getNavMap().getNavPoints().size() > (index + 1)) { // If this is not the last page, next navPoint should start with not closed yet tags because they are not closed in this navPoint as well. openedNotClosedYetTags.addAll(prevOpenedNotClosedYetTags); getToc().getNavMap().getNavPoints().get(index + 1).setOpenTags(openedNotClosedYetTags.isEmpty() ? null : openedNotClosedYetTags); } else { openedNotClosedYetTags.addAll(prevOpenedNotClosedYetTags); if (!openedNotClosedYetTags.isEmpty()) { // openedTags should already be null if this is the last page. throw new ReadingException("Last Page has opened and not yet closed tags."); // For debugging purposes. 
} } return htmlBodyToReplace; } private Pair<String, List<String>> markOmittedTags(List<Tag> currentEntryTags, String htmlBody, int trimStartPosition, int trimEndPosition) { boolean isHtmlBodyModified = false; List<String> stringsToRemove = null; for (Tag tag : currentEntryTags) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (!tag.isOmitted()) { continue; } int fromIndex = -1; int toIndex = -1; if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty Tag if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { fromIndex = tag.getOpeningTagStartPosition() - 1; toIndex = tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2; } } else { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag. fromIndex = tag.getOpeningTagStartPosition() - 1; toIndex = tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1; } if (fromIndex != -1 && toIndex != -1) { htmlBody = htmlBody.substring(0, fromIndex) + Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(toIndex, htmlBody.length()); if (stringsToRemove == null) { stringsToRemove = new ArrayList<>(); } stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); isHtmlBodyModified = true; } if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag. fromIndex = tag.getClosingTagStartPosition() - 1; toIndex = tag.getClosingTagStartPosition() + tag.getTagName().length() + 2; } } // If both opened and closed tags should be removed, skips the closing tag. if (fromIndex != -1 && toIndex != -1) { htmlBody = htmlBody.substring(0, fromIndex) + Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(toIndex, htmlBody.length()); if (stringsToRemove == null) { stringsToRemove = new ArrayList<>(); } stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); isHtmlBodyModified = true; } } return isHtmlBodyModified ? new Pair<>(htmlBody, stringsToRemove) : null; } byte[] getCoverImage() throws ReadingException { Metadata metadata = this.opfPackage.getMetadata(); if (this.opfPackage != null && metadata != null) { String coverImageId = metadata.getCoverImageId(); if (coverImageId != null && !coverImageId.equals("")) { List<XmlItem> manifestXmlItems = this.opfPackage.getManifest().getXmlItemList(); for (XmlItem xmlItem : manifestXmlItems) { if (xmlItem.getAttributes().get("id").equals(coverImageId)) { String coverImageEntryName = xmlItem.getAttributes().get("href"); if (coverImageEntryName != null && !coverImageEntryName.equals("")) { ZipFile epubFile = null; try { try { epubFile = new ZipFile(this.getZipFilePath()); } catch (IOException e) { e.printStackTrace(); throw new ReadingException("Error initializing ZipFile: " + e.getMessage()); } for (String entryName : this.getEntryNames()) { // TODO: I might have to change this contains with equals. 
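// Note: contains() can match the wrong entry when one name is a suffix of another (e.g. "cover.jpg" vs "small-cover.jpg"); comparing with endsWith()/equals() on the decoded name would be stricter.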
if (entryName.contains(coverImageEntryName)) { ZipEntry coverImageEntry = epubFile.getEntry(entryName); InputStream inputStream; try { inputStream = epubFile.getInputStream(coverImageEntry); } catch (IOException e) { e.printStackTrace(); throw new ReadingException("IOException while reading " + entryName + " file: " + e.getMessage()); } try { return ContextHelper.convertIsToByteArray(inputStream); } catch (IOException e) { e.printStackTrace(); throw new ReadingException("IOException while converting inputStream to byte array: " + e.getMessage()); } } } } finally { try { if (epubFile != null) { epubFile.close(); } } catch (IOException e) { e.printStackTrace(); throw new ReadingException("Error closing ZipFile: " + e.getMessage()); } } } } } } } return null; } List<String> getEntryNames() { return entryNames; } void addEntryName(String zipEntryName) { entryNames.add(zipEntryName); } Container getContainer() { return container; } Package getPackage() { return opfPackage; } Toc getToc() { return toc; } void setZipFilePath(String zipFilePath) { this.zipFilePath = zipFilePath; } String getZipFilePath() { return this.zipFilePath; } }
src/main/java/com/github/mertakdut/Content.java
package com.github.mertakdut; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.commons.codec.binary.Base64; import org.xml.sax.SAXException; import com.github.mertakdut.BaseFindings.XmlItem; import com.github.mertakdut.Package.Metadata; import com.github.mertakdut.exception.OutOfPagesException; import com.github.mertakdut.exception.ReadingException; class Content { private Logger logger; private String zipFilePath; private Container container; private Package opfPackage; private Toc toc; private List<String> entryNames; private Map<String, List<Tag>> entryTagPositions; private List<String> nonExistingHrefList; private int playOrder; // private int maxContentPerSection; // String length. private BookSection lastBookSectionInfo; public Content() { logger = new Logger(); entryNames = new ArrayList<>(); container = new Container(); opfPackage = new Package(); toc = new Toc(); } // Debug public void print() { System.out.println("Printing zipEntryNames...\n"); for (int i = 0; i < entryNames.size(); i++) { System.out.println("(" + i + ")" + entryNames.get(i)); } getContainer().print(); getPackage().print(); getToc().print(); } // public BookSection getNextBookSection() throws ReadingException { // NavPoint navPoint = getNavPoint(this.playOrder++); // return prepareBookSection(navPoint, this.playOrder); // } // // public BookSection getPrevBookSection() throws ReadingException { // NavPoint navPoint = getNavPoint(this.playOrder--); // return prepareBookSection(navPoint, this.playOrder); // } BookSection getBookSection(int index) throws ReadingException, OutOfPagesException { BookSection bookSection = null; int orderDiff = index - this.playOrder; while (orderDiff > 0) { // Out of order. Calculate the ones before first. calculateBookSection(--orderDiff); } NavPoint navPoint = getNavPoint(index); if (Optionals.maxContentPerSection == 0 || navPoint.getTypeCode() == 0 || navPoint.getTypeCode() == 1) { // Real navPoint - actual file/anchor. // logger.log(Severity.info, "\nindex: " + index + ", Real(at least for now...) navPoint"); bookSection = prepareBookSection(navPoint, index); } else { // Pseudo navPoint - trimmed file entry. // logger.log(Severity.info, "\nindex: " + index + ", Pseudo navPoint"); bookSection = prepareTrimmedBookSection(navPoint, index); } this.playOrder++; return bookSection; } private NavPoint getNavPoint(int index) throws ReadingException, OutOfPagesException { if (index >= 0) { if (getToc() != null) { List<NavPoint> navPoints = getToc().getNavMap().getNavPoints(); if (index >= navPoints.size()) { throw new OutOfPagesException("Out of bounds at position: " + index); } return navPoints.get(index); } else { throw new ReadingException("Term of Contents is null."); } } else { throw new ReadingException("Index can't be less than 0"); } } // TODO: A new method for only calculating book sections. This will also be useful for pre-loading the whole book. 
private void calculateBookSection(int index) throws ReadingException, OutOfPagesException { NavPoint navPoint = getNavPoint(index); if (Optionals.maxContentPerSection == 0 || navPoint.getTypeCode() == 0 || navPoint.getTypeCode() == 1) { // Real navPoint - actual file/anchor. // logger.log(Severity.info, "\nindex: " + index + ", Real(at least for now...) navPoint"); prepareBookSection(navPoint, index); } else { // Pseudo navPoint - trimmed file entry. // logger.log(Severity.info, "\nindex: " + index + ", Pseudo navPoint"); prepareTrimmedBookSection(navPoint, index); } } private BookSection prepareBookSection(NavPoint navPoint, int index) throws ReadingException, OutOfPagesException { BookSection bookSection = new BookSection(); int entryStartPosition = navPoint.getBodyTrimStartPosition(); int entryEndPosition = navPoint.getBodyTrimEndPosition(); String entryEntryName = navPoint.getEntryName(); String fileContentStr = null; String htmlBody = null; String htmlBodyToReplace = null; // Warning: This is not always working as the content is calculated before. Calculated content may have its entryStartPosition and entryEndPosition 0(Zero). e.g. when no trim needed on htmlBody. if (entryStartPosition == 0 && entryEndPosition == 0) { // Not calculated before. String[] entryNameAndLabel = findEntryNameAndLabel(navPoint); String href = entryNameAndLabel[0]; String label = entryNameAndLabel[1]; String currentAnchor = null; String nextAnchor = null; int trimStartPosition = 0; int trimEndPosition = 0; boolean isSourceFileFound = false; for (int i = 0; i < getEntryNames().size(); i++) { String entryName = getEntryNames().get(i); String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (href.equals(fileName) || (href.startsWith(fileName) && href.replace(fileName, "").startsWith("%23"))) { isSourceFileFound = true; fileContentStr = readFileContent(entryName); htmlBody = getHtmlBody(fileContentStr); // This must not be changed. // entryTagPositions only used in either in trimming or including text content. if ((Optionals.maxContentPerSection != 0 && Optionals.maxContentPerSection < htmlBody.length()) || Optionals.isIncludingTextContent) { // Calculate the tag positions of the current entry, if it hasn't done before. if (entryTagPositions == null || !entryTagPositions.containsKey(entryName)) { if (entryTagPositions == null) { entryTagPositions = new HashMap<>(); } calculateEntryTagPositions(entryName, htmlBody); } } if (!href.equals(fileName)) { // Anchored, e.g. #pgepubid00058 boolean isFileReadFirstTime = isFileReadFirstTime(index, entryName); if (isFileReadFirstTime) { // No previous anchor; so it should start from the beginning to the current anchor. NavPoint currentEntryNavPoint = new NavPoint(); currentEntryNavPoint.setTypeCode(0); currentEntryNavPoint.setContentSrc(fileName); // href or fileName? getToc().getNavMap().getNavPoints().add(index, currentEntryNavPoint); nextAnchor = href.replace(fileName, ""); } else { currentAnchor = href.replace(fileName, ""); nextAnchor = getNextAnchor(index, entryName); } } if (currentAnchor != null || nextAnchor != null) { // Splitting the file by anchors. currentAnchor = convertAnchorToHtml(currentAnchor); nextAnchor = convertAnchorToHtml(nextAnchor); if (currentAnchor != null && nextAnchor != null) { int currentAnchorIndex = htmlBody.indexOf(currentAnchor); int nextAnchorIndex = htmlBody.indexOf(nextAnchor); // Abnormality in toc.ncx file. Its order is probably given wrong. 
// Warning: This may break the navPoints order if all the order is malformed. if (currentAnchorIndex > nextAnchorIndex) { int tmp = currentAnchorIndex; currentAnchorIndex = nextAnchorIndex; nextAnchorIndex = tmp; Collections.swap(getToc().getNavMap().getNavPoints(), index, index + 1); } if (currentAnchorIndex == -1 || nextAnchorIndex == -1) { int tmpIndex = index; if (currentAnchorIndex == -1 && nextAnchorIndex == -1) { // Both of the anchors not found. getToc().getNavMap().getNavPoints().get(tmpIndex++).setMarkedToDelete(true); // Delete the first one (current anchor) getToc().getNavMap().getNavPoints().get(tmpIndex++).setMarkedToDelete(true); // Delete the second one (next anchor) currentAnchor = null; nextAnchor = null; } else if (currentAnchorIndex == -1) { // Current anchor not found. getToc().getNavMap().getNavPoints().get(tmpIndex++).setMarkedToDelete(true); // Delete the first one (current anchor) currentAnchor = nextAnchor; } else if (nextAnchorIndex == -1) { // Next anchor not found. getToc().getNavMap().getNavPoints().get(++tmpIndex).setMarkedToDelete(true); // Delete the second one (next anchor) nextAnchor = null; } int markedNavPoints = tmpIndex - index; // Next available anchor should be the next starting point. while (tmpIndex < getToc().getNavMap().getNavPoints().size()) { // Looping until next anchor is found. boolean isCurrentNavPointMarked = true; NavPoint possiblyNextNavPoint = getNavPoint(tmpIndex); String[] possiblyNextEntryNameLabel = findEntryNameAndLabel(possiblyNextNavPoint); String possiblyNextEntryName = possiblyNextEntryNameLabel[0]; if (possiblyNextEntryName != null) { if (possiblyNextEntryName.startsWith(fileName) && possiblyNextEntryName.replace(fileName, "").startsWith("%23")) { String anchor = possiblyNextEntryName.replace(fileName, ""); anchor = convertAnchorToHtml(anchor); if (htmlBody.contains(anchor)) { if (currentAnchor == null) { // If current anchor is not found, first set that. currentAnchor = anchor; isCurrentNavPointMarked = false; } else { // If current anchor is already defined set the next anchor and break. nextAnchor = anchor; break; } } } else { // TODO: Next content is not the same file as the current one. Anchors are broken. Navigate to the next file. break; } } if (isCurrentNavPointMarked) { getToc().getNavMap().getNavPoints().get(tmpIndex).setMarkedToDelete(true); markedNavPoints++; } tmpIndex++; } if (markedNavPoints != 0) { if (markedNavPoints == getToc().getNavMap().getNavPoints().size() && markedNavPoints > 1) { throw new ReadingException("There are no items left in TOC. Toc.ncx file is probably malformed."); } for (Iterator<NavPoint> iterator = getToc().getNavMap().getNavPoints().iterator(); iterator.hasNext();) { NavPoint navPointToDelete = iterator.next(); if (navPointToDelete.isMarkedToDelete()) { iterator.remove(); if (--markedNavPoints == 0) { break; } } } } } } int[] bodyIntervals = getAnchorsInterval(htmlBody, currentAnchor, nextAnchor); trimStartPosition = bodyIntervals[0]; trimEndPosition = bodyIntervals[1]; } String extension = ContextHelper.getTextAfterCharacter(fileName, Constants.DOT); String mediaType = getMediaType(fileName); // If fileContentStr is too long; crop it by the maxContentPerSection. // Save the fileContent and position within a new navPoint, insert it after current index. if (Optionals.maxContentPerSection != 0) { // maxContentPerSection is given. 
int calculatedTrimEndPosition = calculateTrimEndPosition(entryName, htmlBody, trimStartPosition, trimEndPosition); if (calculatedTrimEndPosition != -1) { trimEndPosition = calculatedTrimEndPosition; htmlBodyToReplace = htmlBody.substring(trimStartPosition, trimEndPosition); NavPoint nextEntryNavPoint = new NavPoint(); nextEntryNavPoint.setTypeCode(2); nextEntryNavPoint.setEntryName(entryName); nextEntryNavPoint.setBodyTrimStartPosition(trimEndPosition); getToc().getNavMap().getNavPoints().add(index + 1, nextEntryNavPoint); // Inserting calculated info to avoid calculating this navPoint again. In the future these data could be written to Term of Contents file. getToc().getNavMap().getNavPoints().get(index).setTypeCode(2); // To indicate that, this is a trimmed part. TODO: Change these with constants. getToc().getNavMap().getNavPoints().get(index).setEntryName(entryName); getToc().getNavMap().getNavPoints().get(index).setBodyTrimStartPosition(trimStartPosition); getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(trimEndPosition); if (lastBookSectionInfo == null) { lastBookSectionInfo = new BookSection(); } lastBookSectionInfo.setExtension(extension); lastBookSectionInfo.setLabel(label); lastBookSectionInfo.setMediaType(mediaType); } else { htmlBodyToReplace = getNonTrimmedHtmlBody(index, htmlBody, trimStartPosition, trimEndPosition, entryName); } } else { htmlBodyToReplace = getNonTrimmedHtmlBody(index, htmlBody, trimStartPosition, trimEndPosition, entryName); } bookSection.setExtension(extension); bookSection.setLabel(label); bookSection.setMediaType(mediaType); if (Optionals.isIncludingTextContent) { bookSection.setSectionTextContent(getOnlyTextContent(entryName, htmlBody, trimStartPosition, trimEndPosition)); } if (Optionals.cssStatus == CssStatus.OMIT) { String modifiedHtmlBody = replaceTableTag(entryName, htmlBody, htmlBodyToReplace, trimStartPosition, trimEndPosition); htmlBodyToReplace = appendIncompleteTags(modifiedHtmlBody, htmlBodyToReplace, entryName, index, trimStartPosition, trimEndPosition); } else { htmlBodyToReplace = appendIncompleteTags(htmlBody, htmlBodyToReplace, entryName, index, trimStartPosition, trimEndPosition); } break; } } if (!isSourceFileFound) { logger.log(Logger.Severity.warning, "Source file not found!"); getToc().getNavMap().getNavPoints().remove(index); return getBookSection(index); } } else { // Calculated before. 
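// Fast path: entryStartPosition/entryEndPosition were stored on this navPoint by an earlier pass, so the anchor search and trim calculation are skipped and the body is sliced directly.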
fileContentStr = readFileContent(entryEntryName); htmlBody = getHtmlBody(fileContentStr); if (entryEndPosition != 0) { htmlBodyToReplace = htmlBody.substring(entryStartPosition, entryEndPosition); } else { htmlBodyToReplace = htmlBody.substring(entryStartPosition); } if (Optionals.isIncludingTextContent) { bookSection.setSectionTextContent(getOnlyTextContent(entryEntryName, htmlBody, entryStartPosition, entryEndPosition)); } if (Optionals.cssStatus == CssStatus.OMIT) { String modifiedHtmlBody = replaceTableTag(entryEntryName, htmlBody, htmlBodyToReplace, entryStartPosition, entryEndPosition); htmlBodyToReplace = appendIncompleteTags(modifiedHtmlBody, htmlBodyToReplace, entryEntryName, index, entryStartPosition, entryEndPosition); } else { htmlBodyToReplace = appendIncompleteTags(htmlBody, htmlBodyToReplace, entryEntryName, index, entryStartPosition, entryEndPosition); } } // htmlBodyToReplace = replaceImgTag(htmlBodyToReplace); fileContentStr = fileContentStr.replace(htmlBody, htmlBodyToReplace); if (Optionals.cssStatus == CssStatus.DISTRIBUTE) { fileContentStr = dissolveStyleTag(fileContentStr); } bookSection.setSectionContent(fileContentStr); return bookSection; } private BookSection prepareTrimmedBookSection(NavPoint entryNavPoint, int index) throws ReadingException, OutOfPagesException { BookSection bookSection = new BookSection(); String entryName = entryNavPoint.getEntryName(); int bodyTrimStartPosition = entryNavPoint.getBodyTrimStartPosition(); int bodyTrimEndPosition = entryNavPoint.getBodyTrimEndPosition(); // Will be calculated on the first attempt. // logger.log(Severity.info, "index: " + index + ", entryName: " + entryName + ", bodyTrimStartPosition: " + bodyTrimStartPosition + ", bodyTrimEndPosition: " // + bodyTrimEndPosition + ", entryOpenedTags: " + entryOpenedTags + ", entryClosingTags: " + entryClosingTags); String fileContent = readFileContent(entryName); String htmlBody = getHtmlBody(fileContent); String htmlBodyToReplace = null; if (bodyTrimEndPosition == 0) { // Not calculated before. String nextAnchor = getNextAnchor(index, entryName); if (nextAnchor != null) { // Next anchor is available in the same file. It may be the next stop for the content. String nextAnchorHtml = convertAnchorToHtml(nextAnchor); int anchorIndex = htmlBody.indexOf(nextAnchorHtml); if (anchorIndex != -1 && bodyTrimStartPosition <= anchorIndex) { while (htmlBody.charAt(anchorIndex) != Constants.TAG_OPENING) { // Getting just before anchor html. anchorIndex--; } bodyTrimEndPosition = anchorIndex; } else { // NextAnchor not found in the htmlContent. Invalidate it by removing it from navPoints and search for the next one. bodyTrimEndPosition = getNextAvailableAnchorIndex(index, entryName, bodyTrimStartPosition, htmlBody); } } int calculatedTrimEndPosition = calculateTrimEndPosition(entryName, htmlBody, bodyTrimStartPosition, bodyTrimEndPosition); if (calculatedTrimEndPosition != -1) { // Trimming again if needed. bodyTrimEndPosition = calculatedTrimEndPosition; htmlBodyToReplace = htmlBody.substring(bodyTrimStartPosition, bodyTrimEndPosition); NavPoint nextEntryNavPoint = new NavPoint(); nextEntryNavPoint.setTypeCode(2); nextEntryNavPoint.setEntryName(entryName); nextEntryNavPoint.setBodyTrimStartPosition(bodyTrimEndPosition); getToc().getNavMap().getNavPoints().add(index + 1, nextEntryNavPoint); getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(bodyTrimEndPosition); // Sets endPosition to avoid calculating again. 
} else { htmlBodyToReplace = getNonTrimmedHtmlBody(index, htmlBody, bodyTrimStartPosition, bodyTrimEndPosition, entryName); } } else { // Calculated before. htmlBodyToReplace = htmlBody.substring(bodyTrimStartPosition, bodyTrimEndPosition); // bodyTrimEndPosition may be zero? } if (Optionals.cssStatus == CssStatus.OMIT) { String modifiedHtmlBody = replaceTableTag(entryName, htmlBody, htmlBodyToReplace, bodyTrimStartPosition, bodyTrimEndPosition); htmlBodyToReplace = appendIncompleteTags(modifiedHtmlBody, htmlBodyToReplace, entryName, index, bodyTrimStartPosition, bodyTrimEndPosition); } else { htmlBodyToReplace = appendIncompleteTags(htmlBody, htmlBodyToReplace, entryName, index, bodyTrimStartPosition, bodyTrimEndPosition); } // htmlBodyToReplace = replaceImgTag(htmlBodyToReplace); if (Optionals.isIncludingTextContent) { bookSection.setSectionTextContent(getOnlyTextContent(entryName, htmlBody, bodyTrimStartPosition, bodyTrimEndPosition)); } fileContent = fileContent.replace(htmlBody, htmlBodyToReplace); if (Optionals.cssStatus == CssStatus.DISTRIBUTE) { fileContent = dissolveStyleTag(fileContent); } bookSection.setSectionContent(fileContent); if (this.lastBookSectionInfo != null) { bookSection.setExtension(this.lastBookSectionInfo.getExtension()); bookSection.setLabel(this.lastBookSectionInfo.getLabel()); bookSection.setMediaType(this.lastBookSectionInfo.getMediaType()); } return bookSection; } private String getNonTrimmedHtmlBody(int index, String htmlBody, int trimStartPosition, int trimEndPosition, String entryName) { String htmlBodyToReplace = null; if (trimEndPosition == 0) { htmlBodyToReplace = htmlBody.substring(trimStartPosition); } else { htmlBodyToReplace = htmlBody.substring(trimStartPosition, trimEndPosition); } getToc().getNavMap().getNavPoints().get(index).setBodyTrimStartPosition(trimStartPosition); getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(trimEndPosition); getToc().getNavMap().getNavPoints().get(index).setEntryName(entryName); return htmlBodyToReplace; } /* * This method calculates and keeps every tag indices of the given entry file. Later on, these calculations will be used when trimming the entry. * * e.g. If the open-close tag indices are in the same trimmed part; tag will be closed there and won't disturb the next trimmed part. * * If the open-close tag indices are not in the same trimmed part; tag will be closed at the end of the current trimmed part, and opened in the next trimmed part. */ private void calculateEntryTagPositions(String entryName, String htmlBody) { List<Tag> openedTags = null; ListIterator<Tag> listIterator = null; boolean isPossiblyTagOpened = false; StringBuilder possiblyTag = new StringBuilder(); Pattern pattern = Pattern.compile(Constants.HTML_TAG_PATTERN); Matcher matcher; for (int i = 0; i < htmlBody.length(); i++) { if (htmlBody.charAt(i) == Constants.TAG_OPENING) { // Tag might have been opened. isPossiblyTagOpened = true; possiblyTag.setLength(0); // In case of double occurence of '<' start from the next found tag opening; e.g. '< <p>'. } else if (htmlBody.charAt(i) == Constants.TAG_CLOSING) { // Tag might have been closed. possiblyTag.append(Constants.TAG_CLOSING); if (htmlBody.charAt(i - 1) != '/') { // Not an empty tag. String tagStr = possiblyTag.toString(); matcher = pattern.matcher(tagStr); if (matcher.matches()) { if (tagStr.charAt(1) == '/') { // Closing tag. Match it with the last open tag with the same name. 
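// Iterating backwards from the end of openedTags gives stack-like (innermost-first) matching, so nested tags sharing a name are paired with the correct opener.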
String tagName = getFullTagName(tagStr, false); listIterator = openedTags.listIterator(openedTags.size()); while (listIterator.hasPrevious()) { Tag openedTag = listIterator.previous(); if (openedTag.getTagName().equals(tagName)) { // Found the last open tag with the same name. addEntryTagPosition(entryName, openedTag.getFullTagName(), openedTag.getOpeningTagStartPosition(), i - tagName.length() - 1); listIterator.remove(); break; } } } else { // Opening tag. if (openedTags == null) { openedTags = new ArrayList<>(); } String fullTagName = getFullTagName(tagStr, true); String tagName = getTagName(fullTagName); Tag tag = new Tag(); tag.setTagName(tagName); tag.setFullTagName(fullTagName); tag.setOpeningTagStartPosition(i - fullTagName.length()); openedTags.add(tag); } } } else { // Empty tag. String tagStr = possiblyTag.toString(); matcher = pattern.matcher(tagStr); if (matcher.matches()) { int closingBracletIndex = tagStr.indexOf(Constants.TAG_CLOSING); String tagName = tagStr.substring(1, closingBracletIndex - 1); addEntryTagPosition(entryName, tagName, i - tagName.length() - 1, i - tagName.length() - 1); } } possiblyTag.setLength(0); isPossiblyTagOpened = false; } if (isPossiblyTagOpened) { possiblyTag.append(htmlBody.charAt(i)); } } } private void addEntryTagPosition(String entryName, String fullTagName, int openingPosition, int closingPosition) { Tag tag = new Tag(); tag.setOpeningTagStartPosition(openingPosition); tag.setClosingTagStartPosition(closingPosition); tag.setFullTagName(fullTagName); tag.setTagName(getTagName(fullTagName)); if (this.entryTagPositions.containsKey(entryName)) { List<Tag> tagList = this.entryTagPositions.get(entryName); int index = tagList.size(); while (index > 0 && tagList.get(index - 1).getOpeningTagStartPosition() > openingPosition) { index--; } this.entryTagPositions.get(entryName).add(index, tag); } else { List<Tag> tagList = new ArrayList<>(); tagList.add(tag); this.entryTagPositions.put(entryName, tagList); } } private String getFullTagName(String tag, boolean isOpeningTag) { int closingBracletIndex = tag.indexOf(Constants.TAG_CLOSING); if (isOpeningTag) { return tag.substring(1, closingBracletIndex); } else { return tag.substring(2, closingBracletIndex); } } private String getTagName(String fullTagName) { if (fullTagName.contains(" ")) { fullTagName = fullTagName.trim(); int endIndex = 1; while (fullTagName.length() > endIndex && fullTagName.charAt(endIndex) != ' ') { endIndex++; } return fullTagName.substring(0, endIndex); } else { return fullTagName; } } // TODO: Similar functionality happens in the prepareBookSection method. Merge them into this. private int getNextAvailableAnchorIndex(int index, String entryName, int bodyTrimStartPosition, String htmlBody) throws ReadingException, OutOfPagesException { getToc().getNavMap().getNavPoints().remove(++index); // Removing the nextAnchor from navPoints; 'cause it's already not found. int markedNavPoints = 0; int anchorIndex = -1; boolean isNextAnchorFound = false; // Next available anchor should be the next starting point. while (index < getToc().getNavMap().getNavPoints().size()) { // Looping until next anchor is found. 
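// NavPoints that cannot supply a usable anchor are only marked here; they are removed in a single sweep after the loop so the indices stay stable while searching.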
NavPoint possiblyNextNavPoint = getNavPoint(index); String[] possiblyNextEntryNameLabel = findEntryNameAndLabel(possiblyNextNavPoint); String possiblyNextEntryName = possiblyNextEntryNameLabel[0]; if (possiblyNextEntryName != null) { String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (possiblyNextEntryName.startsWith(fileName) && possiblyNextEntryName.replace(fileName, "").startsWith("%23")) { String anchor = possiblyNextEntryName.replace(fileName, ""); String anchorHtml = convertAnchorToHtml(anchor); anchorIndex = htmlBody.indexOf(anchorHtml); if (anchorIndex != -1) { while (htmlBody.charAt(anchorIndex) != Constants.TAG_OPENING) { // Getting just before anchor html. anchorIndex--; } if (bodyTrimStartPosition <= anchorIndex) { // getToc().getNavMap().getNavPoints().get(index).setBodyTrimEndPosition(anchorIndex); // Sets endPosition to avoid calculating again. isNextAnchorFound = true; break; } } } else { // TODO: Next content is not the same file as the current one. Anchors are broken. Navigate to the next file. break; } } getToc().getNavMap().getNavPoints().get(index).setMarkedToDelete(true); markedNavPoints++; index++; } if (markedNavPoints != 0) { for (Iterator<NavPoint> iterator = getToc().getNavMap().getNavPoints().iterator(); iterator.hasNext();) { NavPoint navPointToDelete = iterator.next(); if (navPointToDelete.isMarkedToDelete()) { iterator.remove(); if (--markedNavPoints == 0) { break; } } } } if (isNextAnchorFound) { return anchorIndex; } else { return 0; } } private String prepareOpeningTags(List<Tag> openedTags) { StringBuilder openingTagsBuilder = new StringBuilder(); for (ListIterator<Tag> iterator = openedTags.listIterator(); iterator.hasNext();) { openingTagsBuilder.append(Constants.TAG_OPENING).append(iterator.next().getFullTagName()).append(Constants.TAG_CLOSING); } return openingTagsBuilder.toString(); } private String prepareClosingTags(List<Tag> openedTags) { StringBuilder closingTagsBuilder = new StringBuilder(); for (ListIterator<Tag> iterator = openedTags.listIterator(openedTags.size()); iterator.hasPrevious();) { closingTagsBuilder.append(Constants.TAG_START + iterator.previous().getTagName() + Constants.TAG_CLOSING); } return closingTagsBuilder.toString(); } private String prepareOpeningTag(Tag openedTag) { return Constants.TAG_OPENING + openedTag.getFullTagName() + Constants.TAG_CLOSING; } private int calculateTrimEndPosition(String entryName, String htmlBody, int trimStartPosition, int trimEndPos) { int trimEndPosition = (trimEndPos != 0 && (trimEndPos - trimStartPosition) < Optionals.maxContentPerSection) ? trimEndPos : trimStartPosition + Optionals.maxContentPerSection; int htmlBodyLength = htmlBody.length(); // Don't need to trim. HtmlBody with tags are already below limit. if (htmlBodyLength < trimEndPosition || (trimEndPosition - trimStartPosition) < Optionals.maxContentPerSection) { return -1; } List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); int loopCount = 0; int lastTagsLength = 0; while (true) { int tagsLength = 0; for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Empty Tag. 
tagsLength += tag.getFullTagName().length() + 3; // < /> } } else { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag. tagsLength += tag.getFullTagName().length() + 2; // < > } if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag. tagsLength += tag.getTagName().length() + 3; // < /> } } } if (lastTagsLength == tagsLength) { // Tags length isn't greater than the last one. No need to keep going. if (loopCount == 0) { // Returned on the first try. Don't need to trim. HtmlBody without tags are already below limit. if (tagsLength == 0 && htmlBodyLength > trimEndPosition) { // If there are no tags in the trimmed part. break; } return -1; } else { break; } } trimEndPosition += tagsLength; // If trimEndPosition is over the htmlBody's index; then htmlBody is already within limits. No need to trim. if (trimEndPosition >= htmlBodyLength) { return -1; } if (((trimEndPosition - trimStartPosition) - tagsLength) >= Optionals.maxContentPerSection) { break; } lastTagsLength = tagsLength; loopCount++; } // TODO: Regex to find table tags like: <table(*.?)>[</table>|</>] // TODO: This may break the maxContentPerSection rule. Check if the table content will exceed the limit. int tableStartIndex = htmlBody.indexOf(Constants.TAG_TABLE_START, trimStartPosition); // If interval has table, don't break the table. if (tableStartIndex != -1 && tableStartIndex < trimEndPosition) { int tableEndIndex = htmlBody.indexOf(Constants.TAG_TABLE_END, tableStartIndex); if (tableEndIndex != -1) { trimEndPosition = tableEndIndex + Constants.TAG_TABLE_END.length(); } else { trimEndPosition = findEligibleEndPosition(tagStartEndPositions, htmlBody, trimEndPosition); } } else { trimEndPosition = findEligibleEndPosition(tagStartEndPositions, htmlBody, trimEndPosition); } return trimEndPosition; } // Checks if we are in an html tag. If so, move forward or backward until the tag is over. Else, move backwards until we hit the blank. private int findEligibleEndPosition(List<Tag> tagStartEndPositions, String htmlBody, int trimEndPosition) { boolean isMovedToEndOfTag = false; for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty tag. // Inside an empty tag. if (tag.getOpeningTagStartPosition() < trimEndPosition && (tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2) > trimEndPosition) { while (htmlBody.charAt(trimEndPosition) != Constants.TAG_CLOSING) { trimEndPosition++; } trimEndPosition++; isMovedToEndOfTag = true; break; } } else { // Inside an opening tag. if (tag.getOpeningTagStartPosition() < trimEndPosition && (tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1) > trimEndPosition) { while (htmlBody.charAt(trimEndPosition) != Constants.TAG_OPENING) { trimEndPosition--; } // trimEndPosition--; isMovedToEndOfTag = true; break; } // Inside a closing tag. if (tag.getClosingTagStartPosition() < trimEndPosition && (tag.getClosingTagStartPosition() + tag.getTagName().length() + 2) > trimEndPosition) { while (htmlBody.charAt(trimEndPosition) != Constants.TAG_CLOSING) { trimEndPosition++; } trimEndPosition++; isMovedToEndOfTag = true; break; } } } if (!isMovedToEndOfTag) { // To avoid dividing the words in half. while (htmlBody.charAt(trimEndPosition) != ' ') { trimEndPosition--; // We may have hit a tag. 
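// Backing up to the previous space avoids cutting a word in half; stopping at '>' or '<' keeps the cut from walking into markup.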
if (htmlBody.charAt(trimEndPosition) == Constants.TAG_CLOSING) { trimEndPosition++; break; } else if (htmlBody.charAt(trimEndPosition) == Constants.TAG_OPENING) { break; } } } return trimEndPosition; } private String getNextAnchor(int index, String entryName) throws ReadingException, OutOfPagesException { if (getToc().getNavMap().getNavPoints().size() > (index + 1)) { NavPoint nextNavPoint = getNavPoint(index + 1); if (nextNavPoint.getTypeCode() != 2) { // Real navPoint. Only real navPoints are anchored. TODO: Change these with constants. String[] nextEntryLabel = findEntryNameAndLabel(nextNavPoint); String nextHref = nextEntryLabel[0]; if (nextHref != null) { String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (nextHref.startsWith(fileName) && nextHref.replace(fileName, "").startsWith("%23")) { // Both anchors are in the same file. return nextHref.replace(fileName, ""); } } } } return null; } private boolean isFileReadFirstTime(int index, String entryName) throws ReadingException, OutOfPagesException { if ((index - 1) >= 0) { NavPoint prevNavPoint = getNavPoint(index - 1); if (prevNavPoint.getTypeCode() == 2) { return false; } String prevHref = findEntryNameAndLabel(prevNavPoint)[0]; if (prevHref != null) { String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (prevHref.startsWith(fileName)) { // Same content as previous, not reading for the first time. (&& prevHref.replace(fileName, "").startsWith("%23")) return false; } } } return true; } private String[] findEntryNameAndLabel(NavPoint navPoint) throws ReadingException { if (navPoint.getContentSrc() != null) { return new String[] { navPoint.getContentSrc(), navPoint.getNavLabel() }; } throw new ReadingException("NavPoint content is not found in epub content."); } // TODO: This operation is getting expensive and expensive. fileContent could be held in cache; if the entry is same. Maybe a map with one element -> <entryName, fileContent> // If map doesn't contain that entryName -> then this method can be used. 
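// A possible shape for the cache suggested in the TODO above; a sketch with hypothetical names,
// not wired into the original methods. Keeping the last <entryName, fileContent> pair would let
// consecutive sections of the same entry skip the ZipFile round-trip.
private String lastReadEntryName;
private String lastReadFileContent;

private String readFileContentCached(String entryName) throws ReadingException {
    if (entryName.equals(lastReadEntryName)) { // Cache hit: same entry as the previous section.
        return lastReadFileContent;
    }
    lastReadFileContent = readFileContent(entryName); // Cache miss: read and remember.
    lastReadEntryName = entryName;
    return lastReadFileContent;
}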
private String readFileContent(String entryName) throws ReadingException { ZipFile epubFile = null; try { epubFile = new ZipFile(zipFilePath); ZipEntry zipEntry = epubFile.getEntry(entryName); InputStream inputStream = epubFile.getInputStream(zipEntry); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8")); StringBuilder fileContent = new StringBuilder(); try { String line; while ((line = bufferedReader.readLine()) != null) { fileContent.append(line).append(" "); } } finally { bufferedReader.close(); } String fileContentStr = fileContent.toString(); if (Optionals.cssStatus != CssStatus.OMIT) { fileContentStr = replaceCssLinkWithActualCss(epubFile, fileContentStr); } return fileContentStr; } catch (IOException e) { e.printStackTrace(); throw new ReadingException("IOException while reading content " + entryName + ": " + e.getMessage()); } catch (ParserConfigurationException e) { e.printStackTrace(); throw new ReadingException("ParserConfigurationException while reading content " + entryName + ": " + e.getMessage()); } catch (SAXException e) { e.printStackTrace(); throw new ReadingException("SAXException while reading content " + entryName + ": " + e.getMessage()); } catch (TransformerException e) { e.printStackTrace(); throw new ReadingException("TransformerException while reading content " + entryName + ": " + e.getMessage()); } finally { try { if (epubFile != null) { epubFile.close(); } } catch (IOException e) { e.printStackTrace(); throw new ReadingException("Error closing ZipFile: " + e.getMessage()); } } } private String getHtmlBody(String htmlContent) throws ReadingException { int startOfBody = htmlContent.lastIndexOf(Constants.TAG_BODY_START); int endOfBody = htmlContent.lastIndexOf(Constants.TAG_BODY_END); if (startOfBody == -1 || endOfBody == -1) { throw new ReadingException("Exception while getting book section : Html body tags not found."); } int bodyStartEndIndex = startOfBody + Constants.TAG_BODY_START.length(); while (htmlContent.charAt(bodyStartEndIndex) != Constants.TAG_CLOSING) { bodyStartEndIndex++; } return htmlContent.substring(bodyStartEndIndex + 1, endOfBody); } // Starts from current anchor, reads until the next anchor starts.
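// Returns { startOfCurrentAnchor, startOfNextAnchor }; a missing anchor falls back to 0, which callers treat as 'from the beginning' (start) or 'until the end' (end).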
private int[] getAnchorsInterval(String htmlBody, String currentAnchor, String nextAnchor) throws ReadingException { int startOfCurrentAnchor = -1; int startOfNextAnchor = -1; if (currentAnchor != null && !currentAnchor.equals("")) { startOfCurrentAnchor = htmlBody.indexOf(currentAnchor); } if (nextAnchor != null && !nextAnchor.equals("")) { startOfNextAnchor = htmlBody.indexOf(nextAnchor); } if (startOfCurrentAnchor != -1) { while (htmlBody.charAt(startOfCurrentAnchor) != Constants.TAG_OPENING) { startOfCurrentAnchor--; } } else { startOfCurrentAnchor = 0; } if (startOfNextAnchor != -1) { while (htmlBody.charAt(startOfNextAnchor) != Constants.TAG_OPENING) { startOfNextAnchor--; } } else { startOfNextAnchor = 0; } return new int[] { startOfCurrentAnchor, startOfNextAnchor }; // throw new ReadingException("Exception while trimming anchored parts : Defined Anchors not found."); } private String convertAnchorToHtml(String anchor) throws ReadingException { // #Page_1 to id="Page_1" converter if (anchor == null) { return null; } if (anchor.startsWith("#")) { // Anchors should start with # return "id=\"" + anchor.substring(1) + "\""; } else if (anchor.startsWith("%23")) { // Or UTF-8 equivalent of # return "id=\"" + anchor.substring(3) + "\""; } else { throw new ReadingException("Anchor does not start with #"); } } private String getMediaType(String fileName) { List<XmlItem> manifestItems = getPackage().getManifest().getXmlItemList(); for (int i = 0; i < manifestItems.size(); i++) { if (manifestItems.get(i).getAttributes().containsValue(fileName)) { if (manifestItems.get(i).getAttributes().containsKey("media-type")) { return manifestItems.get(i).getAttributes().get("media-type"); } } } return null; } // Distributing the css parts in the style tag to the belonging html tags. private String dissolveStyleTag(String trimmedFileContent) throws ReadingException { Pattern cssPattern = Pattern.compile("<style(.*?)>(.*?)</style>"); Matcher matcher = cssPattern.matcher(trimmedFileContent); while (matcher.find()) { // There may be multiple style tags. String styleTagStr = matcher.group(2); Map<String, String> cssMap = getCssMap(styleTagStr); String htmlBody = getHtmlBody(trimmedFileContent); String htmlBodyToReplace = putCssIntoTags(cssMap, htmlBody); trimmedFileContent = trimmedFileContent.replace(htmlBody, htmlBodyToReplace); trimmedFileContent = trimmedFileContent.replace("<style" + styleTagStr + "</style>", ""); } return trimmedFileContent; } private Map<String, String> getCssMap(String cssfileContent) { Map<String, String> cssMap = new HashMap<>(); Pattern cssPattern = Pattern.compile("\\{(.*?)\\}"); Matcher matcher = cssPattern.matcher(cssfileContent); while (matcher.find()) { String cssValue = matcher.group(1); int indexOfCurlyStart = matcher.start(); int indexOfCssNameStart = indexOfCurlyStart - 1; StringBuilder cssNameBuilder = new StringBuilder(); String cssName = null; while (indexOfCssNameStart >= 0) { // TODO: There may be multiple css names pointing to one cssValue e.g. 
.legalnotice p { text-align: left; } OR .legalnotice, p { text-align: left; } if (cssfileContent.charAt(indexOfCssNameStart) == '}' || cssfileContent.charAt(indexOfCssNameStart) == '/') { String builtCssName = cssNameBuilder.toString().trim(); if (builtCssName.length() > 0) { cssName = cssNameBuilder.reverse().toString().trim(); break; } } cssNameBuilder.append(cssfileContent.charAt(indexOfCssNameStart)); indexOfCssNameStart--; } List<String> cssNameList = null; // Seperate them here by ' ', ',' '>' (known seperators) String seperator = null; if (cssName.contains(",")) { seperator = ","; } else if (cssName.contains(">")) { seperator = ">"; } else if (cssName.contains(" ")) { seperator = " "; } if (seperator != null) { cssNameList = Arrays.asList(cssName.split(seperator)); } if (cssNameList == null) { // Has one css name if (cssMap.containsKey(cssName)) { cssMap.put(cssName, cssMap.get(cssName) + " " + cssValue); } else { cssMap.put(cssName, cssValue); } } else { // Has multiple css names for (String cssNameItem : cssNameList) { if (cssMap.containsKey(cssNameItem)) { cssMap.put(cssNameItem, cssMap.get(cssNameItem) + " " + cssValue); } else { cssMap.put(cssNameItem, cssValue); } } } } return cssMap; } // TODO: Search htmlBody tags by cssName and put cssValues where they found. // e.g. div.mert, "margin-left:30px; padding-top:25px" // <div class="mert"> -> <div style="margin-left:30px; padding-top:25px"> private String putCssIntoTags(Map<String, String> cssMap, String trimmedHtmlBody) { for (Map.Entry<String, String> cssEntry : cssMap.entrySet()) { String tagName = cssEntry.getKey(); String className = null; int classNameLength = 0; int dotIndex = cssEntry.getKey().indexOf("."); if (dotIndex > 0) { // e.g. div.mert className = cssEntry.getKey().substring(dotIndex + 1); classNameLength = className.length(); tagName = cssEntry.getKey().substring(0, dotIndex); } int startTagIndex = trimmedHtmlBody.indexOf("<" + tagName); while (startTagIndex != -1) { int endTagIndex = startTagIndex; while (trimmedHtmlBody.charAt(endTagIndex) != '>') { endTagIndex++; } endTagIndex++; // Not an empty tag and big enough for class attribute. if (trimmedHtmlBody.charAt(endTagIndex - 1) != '/' && (endTagIndex - startTagIndex) > (5 + classNameLength)) { String tag = trimmedHtmlBody.substring(startTagIndex, endTagIndex); if (className == null || tag.contains(className)) { // Remove redundant class. if (className != null) { int classEndIndex = tag.indexOf(className); int classStartIndex = classEndIndex - 1; while (tag.charAt(classStartIndex) != 'c') { classStartIndex--; } tag = tag.substring(0, classStartIndex) + tag.substring(classEndIndex + classNameLength + 1, tag.length()); } int styleIndex = tag.indexOf("style=\""); String tagToReplace = null; if (styleIndex != -1) { // Already has a style tag. Put the value into it. 
tagToReplace = tag.substring(0, styleIndex + 6) + cssEntry.getValue() + tag.substring(styleIndex + 6, tag.length()); } else { int insertStyleIndex = 1 + tagName.length() + 1; // '<' and ' ' tagToReplace = tag.substring(0, insertStyleIndex) + "style=\"" + cssEntry.getValue() + "\" " + tag.substring(insertStyleIndex, tag.length()); } trimmedHtmlBody = trimmedHtmlBody.replaceFirst(tag, tagToReplace); } } startTagIndex = trimmedHtmlBody.indexOf("<" + tagName, startTagIndex + 1); } } return trimmedHtmlBody; } private String replaceCssLinkWithActualCss(ZipFile epubFile, String htmlContent) throws IOException, ParserConfigurationException, ReadingException, SAXException, TransformerException { // <link rel="stylesheet" type="text/css" href="docbook-epub.css"/> Pattern linkTagPattern = Pattern.compile("<link.*?/>|<link.*?</link>"); Pattern hrefPattern = Pattern.compile("href=\"(.*?)\""); Matcher linkMatcher = linkTagPattern.matcher(htmlContent); while (linkMatcher.find()) { String linkTag = linkMatcher.group(0); Matcher hrefMatcher = hrefPattern.matcher(linkTag); if (hrefMatcher.find()) { String cssHref = ContextHelper.getTextAfterCharacter(hrefMatcher.group(1), Constants.SLASH); if (cssHref.endsWith(".css")) { // Should we check for its type as well? text/css if (nonExistingHrefList != null && nonExistingHrefList.contains(cssHref)) { // logger.log(Logger.Severity.warning, "Already not found on the first try. Skipping the search for(Css) : " + cssHref); htmlContent = htmlContent.replace(linkTag, ""); } else { boolean isCssFileFound = false; for (int i = 0; i < getEntryNames().size(); i++) { String entryName = getEntryNames().get(i); String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (cssHref.equals(fileName)) { // css exists. isCssFileFound = true; ZipEntry zipEntry = epubFile.getEntry(entryName); InputStream zipEntryInputStream = epubFile.getInputStream(zipEntry); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(zipEntryInputStream)); StringBuilder fileContent = new StringBuilder(); fileContent.append("<style type=\"text/css\">"); try { String line; while ((line = bufferedReader.readLine()) != null) { fileContent.append(line); } } finally { bufferedReader.close(); } fileContent.append("</style>"); htmlContent = htmlContent.replace(linkTag, fileContent.toString()); break; } } if (!isCssFileFound) { logger.log(Logger.Severity.warning, "Referenced css file not found!"); if (nonExistingHrefList == null) { nonExistingHrefList = new ArrayList<>(); } nonExistingHrefList.add(cssHref); htmlContent = htmlContent.replace(cssHref, ""); } } } } } return htmlContent; } private String replaceImgTag(String htmlBody) throws ReadingException { Pattern imgTagPattern = Pattern.compile("<img.*?/>|<img.*?</img>"); Pattern srcPattern = Pattern.compile("src=\"(.*?)\""); Matcher imgTagMatcher = imgTagPattern.matcher(htmlBody); while (imgTagMatcher.find()) { String imgPart = imgTagMatcher.group(0); Matcher srcMatcher = srcPattern.matcher(imgPart); if (srcMatcher.find()) { String srcHref = ContextHelper.getTextAfterCharacter(srcMatcher.group(1), Constants.SLASH); String encodedSrcHref = ContextHelper.encodeToUtf8(srcHref); if (nonExistingHrefList != null && nonExistingHrefList.contains(srcHref)) { // logger.log(Logger.Severity.warning, "Already not found on the first try. 
Skipping the search for(Img) : " + srcMatcher); htmlBody = htmlBody.replace(imgPart, ""); } else { boolean isImageFileFound = false; for (int i = 0; i < getEntryNames().size(); i++) { String entryName = getEntryNames().get(i); String fileName = ContextHelper.encodeToUtf8(ContextHelper.getTextAfterCharacter(entryName, Constants.SLASH)); if (encodedSrcHref.equals(fileName)) { // image exists. isImageFileFound = true; ZipFile epubFile = null; try { String extension = ContextHelper.getTextAfterCharacter(fileName, Constants.DOT); epubFile = new ZipFile(this.zipFilePath); ZipEntry zipEntry = epubFile.getEntry(entryName); InputStream zipEntryInputStream = epubFile.getInputStream(zipEntry); // Convert inputStream to Base64Binary. byte[] imageAsBytes = ContextHelper.convertIsToByteArray(zipEntryInputStream); byte[] imageAsBase64 = Base64.encodeBase64(imageAsBytes); String imageContent = new String(imageAsBase64); String src = "data:image/" + extension + ";base64," + imageContent; htmlBody = htmlBody.replace(srcHref, src); break; } catch (IOException e) { e.printStackTrace(); } finally { if (epubFile != null) { try { epubFile.close(); } catch (IOException e) { e.printStackTrace(); } } } } } if (!isImageFileFound) { logger.log(Logger.Severity.warning, "Referenced image file not found: " + srcHref); if (nonExistingHrefList == null) { nonExistingHrefList = new ArrayList<>(); } nonExistingHrefList.add(srcHref); htmlBody = htmlBody.replace(imgPart, ""); } } } } return htmlBody; } // Warning: May devour anchors. private String replaceTableTag(String entryName, String htmlBody, String htmlBodyToReplace, int trimStartPosition, int trimEndPosition) { Pattern tableTagPattern = Pattern.compile("<table.*?>", Pattern.DOTALL); Matcher tableTagMatcher = tableTagPattern.matcher(htmlBodyToReplace); if (tableTagMatcher.find()) { if (entryTagPositions == null || !entryTagPositions.containsKey(entryName)) { if (entryTagPositions == null) { entryTagPositions = new HashMap<>(); } calculateEntryTagPositions(entryName, htmlBody); } List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); List<Tag> tableTagList = new ArrayList<>(); for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getTagName().equals("table")) { if (tag.getOpeningTagStartPosition() != tag.getClosingTagStartPosition()) { // Not an empty table tag. if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag is within scope. tableTagList.add(tag); // if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag is also withing scope. // // } } } } } // Remove nested tables. 
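// A table counts as nested when its open/close span lies strictly inside another captured table's span; only outermost tables are kept so each one is handled as a single unit.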
List<Tag> smallerTableTagList = new ArrayList<>(); for (int i = 0; i < tableTagList.size(); i++) { int tag1StartPosition = tableTagList.get(i).getOpeningTagStartPosition(); int tag1EndPosition = tableTagList.get(i).getClosingTagStartPosition(); for (int j = i + 1; j < tableTagList.size(); j++) { int tag2StartPosition = tableTagList.get(j).getOpeningTagStartPosition(); int tag2EndPosition = tableTagList.get(j).getClosingTagStartPosition(); if (tag1StartPosition > tag2StartPosition && tag1EndPosition < tag2EndPosition) { smallerTableTagList.add(tableTagList.get(i)); } else if (tag2StartPosition > tag1StartPosition && tag2EndPosition < tag1EndPosition) { smallerTableTagList.add(tableTagList.get(j)); } } } tableTagList.removeAll(smallerTableTagList); return getOnlyTextContent(entryName, htmlBody, trimStartPosition, trimEndPosition, tableTagList); } return htmlBody; } private String getOnlyTextContent(String entryName, String htmlBody, int trimStartPosition, int trimEndPosition, List<Tag> tableTagPositions) { List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); // List<String> stringsToRemove = new ArrayList<>(); for (int i = 0; i < tableTagPositions.size(); i++) { int tableStartPosition = tableTagPositions.get(i).getOpeningTagStartPosition(); int tableEndPosition = tableTagPositions.get(i).getClosingTagStartPosition(); for (Tag tag : tagStartEndPositions) { if (tag.getOpeningTagStartPosition() > tableEndPosition) { break; } // Exclude img tags to save images in table tag. // if(tagInfo.getTagName().equals("img")) { // continue; // } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty Tag if (tag.getOpeningTagStartPosition() > tableStartPosition && tag.getOpeningTagStartPosition() < tableEndPosition) { htmlBody = htmlBody.substring(0, tag.getOpeningTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2, htmlBody.length()); // stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), // tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); tag.setOmitted(true); } } else { if (tag.getOpeningTagStartPosition() > tableStartPosition && tag.getOpeningTagStartPosition() < tableEndPosition) { // Opening tag. htmlBody = htmlBody.substring(0, tag.getOpeningTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1, htmlBody.length()); // stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), // tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); tag.setOmitted(true); } if (tag.getClosingTagStartPosition() > tableStartPosition && tag.getClosingTagStartPosition() < tableEndPosition) { // Closing tag. 
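// As with the opening tag above, the closing tag is blanked in place with STRING_MARKER sentinels (length-preserving), so recorded tag positions stay valid until the marked runs are stripped.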
htmlBody = htmlBody.substring(0, tag.getClosingTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getClosingTagStartPosition() + tag.getTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() + tag.getTagName().length() + 2, htmlBody.length()); // stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() - 1 + Constants.STRING_MARKER.length(), // tag.getClosingTagStartPosition() + tag.getTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); tag.setOmitted(true); } } } } // htmlBody = htmlBody.substring(trimStartPosition, trimEndPosition); // TODO: If stringToRemove contains tr tag, then replace it with br. // for (String stringToRemove : stringsToRemove) { // htmlBody = htmlBody.replace(stringToRemove, ""); // } return htmlBody; } // Removes all the tags from htmlBody and returns it. private String getOnlyTextContent(String entryName, String htmlBody, int trimStartPosition, int trimEndPosition) { List<Tag> tagStartEndPositions = this.entryTagPositions.get(entryName); List<String> stringsToRemove = new ArrayList<>(); if (trimEndPosition == 0) { trimEndPosition = htmlBody.length(); } for (Tag tag : tagStartEndPositions) { // This may not work correctly. if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty Tag if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { htmlBody = htmlBody.substring(0, tag.getOpeningTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2, htmlBody.length()); stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); } } else { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag. htmlBody = htmlBody.substring(0, tag.getOpeningTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1, htmlBody.length()); stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getOpeningTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); } if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag. 
htmlBody = htmlBody.substring(0, tag.getClosingTagStartPosition() - 1) + Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getClosingTagStartPosition() + tag.getTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() + tag.getTagName().length() + 2, htmlBody.length()); stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(tag.getClosingTagStartPosition() - 1 + Constants.STRING_MARKER.length(), tag.getClosingTagStartPosition() + tag.getTagName().length() + 2 - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); } } } htmlBody = htmlBody.substring(trimStartPosition, trimEndPosition); for (String stringToRemove : stringsToRemove) { htmlBody = htmlBody.replace(stringToRemove, ""); } return htmlBody; } // TODO: Save these in navPoints as well avoid calculating again. private String appendIncompleteTags(String htmlBody, String htmlBodyToReplace, String entryName, int index, int trimStartPosition, int trimEndPosition) throws ReadingException { List<Tag> prevOpenedNotClosedYetTags = new ArrayList<>(); // Previously opened in this scope and not yet closed tags in scope. Appending opening and closing tags. List<Tag> openedNotClosedYetTags = new ArrayList<>(); // Opened in this scope and not yet closed tags in scope. Appending only closing tags. List<Tag> prevOpenedClosedTags = new ArrayList<>(); // Previously opened and closed in this scope. Appending only opening tags. List<Tag> currentEntryTags = this.entryTagPositions.get(entryName); trimEndPosition = trimEndPosition == 0 ? htmlBody.length() : trimEndPosition; for (int i = 0; i < currentEntryTags.size(); i++) { Tag tag = currentEntryTags.get(i); // TODO: break this when it's out of possibility. // Opened in the trimmed part, closed after the trimmed part. if (!tag.isOmitted() && tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition && tag.getClosingTagStartPosition() > trimEndPosition) { openedNotClosedYetTags.add(tag); } } List<Tag> prevOpenedTags = getToc().getNavMap().getNavPoints().get(index).getOpenTags(); if (prevOpenedTags != null) { for (Tag prevOpenedTag : prevOpenedTags) { // If the tag ends before text starts, tag should open and then close, // If tag does not end before text starts, tag should be placed in the beginning. if (prevOpenedTag.getClosingTagStartPosition() > trimEndPosition) { // Previously opened and not yet closed in scope tags. Should have a place in the beginning. prevOpenedNotClosedYetTags.add(prevOpenedTag); } else { // Previously opened but closed in scope tags. // TODO: Find these tags a position :( Or just append them from the beginning. I don't think it would break anything, would it? prevOpenedClosedTags.add(prevOpenedTag); } } } Pair<String, List<String>> htmlBodyMarkingsPair = null; if (Optionals.cssStatus == CssStatus.OMIT) { // Tag omitting only happens in replaceTableTag function when css status is given Omit. htmlBodyMarkingsPair = markOmittedTags(currentEntryTags, htmlBody, trimStartPosition, trimEndPosition); if (htmlBodyMarkingsPair != null) { htmlBody = htmlBodyMarkingsPair.getFirst(); } } // TODO: We shouldn't substring htmlBody before this method. 
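// From here on the trimmed body is made self-contained: closing tags are appended for everything still open in this section, and opening tags are prepended for tags inherited from the previous section.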
if (trimEndPosition == htmlBody.length()) { htmlBodyToReplace = htmlBody.substring(trimStartPosition); } else { htmlBodyToReplace = htmlBody.substring(trimStartPosition, trimEndPosition); } if (htmlBodyMarkingsPair != null) { List<String> stringsToRemove = htmlBodyMarkingsPair.getSecond(); if (stringsToRemove != null) { for (String stringToRemove : stringsToRemove) { htmlBodyToReplace = htmlBodyToReplace.replace(stringToRemove, ""); } } } String openingTags = ""; String closingTags = ""; if (!openedNotClosedYetTags.isEmpty()) { closingTags += prepareClosingTags(openedNotClosedYetTags); } if (!prevOpenedNotClosedYetTags.isEmpty()) { openingTags += prepareOpeningTags(prevOpenedNotClosedYetTags); closingTags += prepareClosingTags(prevOpenedNotClosedYetTags); } if (!prevOpenedClosedTags.isEmpty()) { openingTags += prepareOpeningTags(prevOpenedClosedTags); } if (!openingTags.isEmpty() || !closingTags.isEmpty()) { htmlBodyToReplace = openingTags + htmlBodyToReplace + closingTags; } if (getToc().getNavMap().getNavPoints().size() > (index + 1)) { // If this is not the last page, next navPoint should start with not closed yet tags because they are not closed in this navPoint as well. openedNotClosedYetTags.addAll(prevOpenedNotClosedYetTags); getToc().getNavMap().getNavPoints().get(index + 1).setOpenTags(openedNotClosedYetTags.isEmpty() ? null : openedNotClosedYetTags); } else { openedNotClosedYetTags.addAll(prevOpenedNotClosedYetTags); if (!openedNotClosedYetTags.isEmpty()) { // openedTags should already be null if this is the last page. throw new ReadingException("Last Page has opened and not yet closed tags."); // For debugging purposes. } } return htmlBodyToReplace; } private Pair<String, List<String>> markOmittedTags(List<Tag> currentEntryTags, String htmlBody, int trimStartPosition, int trimEndPosition) { boolean isHtmlBodyModified = false; List<String> stringsToRemove = null; for (Tag tag : currentEntryTags) { if (tag.getOpeningTagStartPosition() > trimEndPosition) { break; } if (!tag.isOmitted()) { continue; } int fromIndex = -1; int toIndex = -1; if (tag.getOpeningTagStartPosition() == tag.getClosingTagStartPosition()) { // Empty Tag if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { fromIndex = tag.getOpeningTagStartPosition() - 1; toIndex = tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 2; } } else { if (tag.getOpeningTagStartPosition() > trimStartPosition && tag.getOpeningTagStartPosition() < trimEndPosition) { // Opening tag. fromIndex = tag.getOpeningTagStartPosition() - 1; toIndex = tag.getOpeningTagStartPosition() + tag.getFullTagName().length() + 1; } if (fromIndex != -1 && toIndex != -1) { htmlBody = htmlBody.substring(0, fromIndex) + Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(toIndex, htmlBody.length()); if (stringsToRemove == null) { stringsToRemove = new ArrayList<>(); } stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); isHtmlBodyModified = true; } if (tag.getClosingTagStartPosition() > trimStartPosition && tag.getClosingTagStartPosition() < trimEndPosition) { // Closing tag. 
fromIndex = tag.getClosingTagStartPosition() - 1; toIndex = tag.getClosingTagStartPosition() + tag.getTagName().length() + 2; } } // If both opened and closed tags should be removed, skips the closing tag. if (fromIndex != -1 && toIndex != -1) { htmlBody = htmlBody.substring(0, fromIndex) + Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER + htmlBody.substring(toIndex, htmlBody.length()); if (stringsToRemove == null) { stringsToRemove = new ArrayList<>(); } stringsToRemove.add(Constants.STRING_MARKER + htmlBody.substring(fromIndex + Constants.STRING_MARKER.length(), toIndex - Constants.STRING_MARKER.length()) + Constants.STRING_MARKER); isHtmlBodyModified = true; } } return isHtmlBodyModified ? new Pair<>(htmlBody, stringsToRemove) : null; } private void addTagToInsert(List<Pair<Tag, Integer>> tagPositionPairList, Tag tag, int cursor) { int insertPosition = tagPositionPairList.size(); while (insertPosition > 0 && tagPositionPairList.get(insertPosition - 1).getSecond() >= cursor) { // Insert tags in descending order to insert them from last to first. insertPosition--; } tagPositionPairList.add(insertPosition, new Pair<>(tag, cursor)); } byte[] getCoverImage() throws ReadingException { Metadata metadata = this.opfPackage.getMetadata(); if (this.opfPackage != null && metadata != null) { String coverImageId = metadata.getCoverImageId(); if (coverImageId != null && !coverImageId.equals("")) { List<XmlItem> manifestXmlItems = this.opfPackage.getManifest().getXmlItemList(); for (XmlItem xmlItem : manifestXmlItems) { if (xmlItem.getAttributes().get("id").equals(coverImageId)) { String coverImageEntryName = xmlItem.getAttributes().get("href"); if (coverImageEntryName != null && !coverImageEntryName.equals("")) { ZipFile epubFile = null; try { try { epubFile = new ZipFile(this.getZipFilePath()); } catch (IOException e) { e.printStackTrace(); throw new ReadingException("Error initializing ZipFile: " + e.getMessage()); } for (String entryName : this.getEntryNames()) { // TODO: I might have to change this contains with equals. if (entryName.contains(coverImageEntryName)) { ZipEntry coverImageEntry = epubFile.getEntry(entryName); InputStream inputStream; try { inputStream = epubFile.getInputStream(coverImageEntry); } catch (IOException e) { e.printStackTrace(); throw new ReadingException("IOException while reading " + entryName + " file: " + e.getMessage()); } try { return ContextHelper.convertIsToByteArray(inputStream); } catch (IOException e) { e.printStackTrace(); throw new ReadingException("IOException while converting inputStream to byte array: " + e.getMessage()); } } } } finally { try { if (epubFile != null) { epubFile.close(); } } catch (IOException e) { e.printStackTrace(); throw new ReadingException("Error closing ZipFile: " + e.getMessage()); } } } } } } } return null; } List<String> getEntryNames() { return entryNames; } void addEntryName(String zipEntryName) { entryNames.add(zipEntryName); } Container getContainer() { return container; } Package getPackage() { return opfPackage; } Toc getToc() { return toc; } void setZipFilePath(String zipFilePath) { this.zipFilePath = zipFilePath; } String getZipFilePath() { return this.zipFilePath; } }
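For context, a minimal usage sketch showing how these section methods are typically driven through the library's public Reader facade; the method and package names below are recalled from the project's README and should be treated as assumptions, not part of Content.java:

import com.github.mertakdut.BookSection;
import com.github.mertakdut.Reader;
import com.github.mertakdut.exception.OutOfPagesException;
import com.github.mertakdut.exception.ReadingException;

class EpubPagingExample {
    public static void main(String[] args) throws ReadingException {
        Reader reader = new Reader();
        reader.setMaxContentPerSection(1000); // enables the trimming/pseudo-navPoint paging above
        reader.setIsIncludingTextContent(true); // populates BookSection.getSectionTextContent()
        reader.setFullContent("path/to/book.epub");
        try {
            for (int i = 0; ; i++) {
                BookSection section = reader.readSection(i); // delegates to the paging logic in this class
                System.out.println(section.getLabel());
            }
        } catch (OutOfPagesException e) {
            // Thrown once the index passes the last navPoint; the normal end-of-book signal.
        }
    }
}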
minor changes
src/main/java/com/github/mertakdut/Content.java
minor changes
Java
apache-2.0
84dfd4794382ffd2a368cd4a601e57df19bebdb1
0
apache/shiro,relateiq/shiro,feige712/shiro
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.shiro.tools.hasher; import org.apache.commons.cli.*; import org.apache.shiro.codec.Base64; import org.apache.shiro.codec.Hex; import org.apache.shiro.crypto.SecureRandomNumberGenerator; import org.apache.shiro.crypto.UnknownAlgorithmException; import org.apache.shiro.crypto.hash.SimpleHash; import org.apache.shiro.io.ResourceUtils; import org.apache.shiro.util.ByteSource; import org.apache.shiro.util.JavaEnvironment; import org.apache.shiro.util.StringUtils; import java.io.File; import java.io.IOException; import java.util.Arrays; /** * Commandline line utility to hash data such as strings, passwords, resources (files, urls, etc). * <p/> * Usage: * <pre> * java -jar shiro-tools-hasher<em>-version</em>-cli.jar * </pre> * This will print out all supported options with documentation. * * @since 1.2 */ public final class Hasher { private static final Option ALGORITHM = new Option("a", "algorithm", true, "hash algorithm name. Defaults to MD5."); private static final Option DEBUG = new Option("d", "debug", false, "show additional error (stack trace) information."); private static final Option HELP = new Option("help", "help", false, "show this help message."); private static final Option HEX = new Option("h", "hex", false, "display a hex value instead of Base64."); private static final Option ITERATIONS = new Option("i", "iterations", true, "number of hash iterations. Defaults to 1."); private static final Option NO_FORMAT = new Option("nf", "noformat", false, "turn off output formatting. Any generated salt will be placed after the hash separated by a space."); private static final Option PASSWORD = new Option("p", "password", false, "hash a password (disable typing echo)"); private static final Option PASSWORD_NC = new Option("pnc", "pnoconfirm", false, "hash a password (disable typing echo) but disable password confirmation prompt."); private static final Option RESOURCE = new Option("r", "resource", false, "read and hash the resource located at <value>. See below for more information."); private static final Option SALT = new Option("s", "salt", true, "use the specified salt. <arg> is plaintext."); private static final Option SALT_BYTES = new Option("sb", "saltbytes", true, "use the specified salt bytes. <arg> is hex or base64 encoded text."); private static final Option SALT_GEN = new Option("gs", "gensalt", false, "generate and use a random salt."); private static final Option SALT_GEN_HEX = new Option("gsh", "gensalthex", false, "display the generated salt's hex value instead of Base64."); private static final Option SALT_GEN_SIZE = new Option("gss", "gensaltsize", true, "the number of salt bits (not bytes!) to generate. 
Defaults to 128."); private static final Option SHIRO = new Option("shiro", "shiro", false, "display output in the Shiro password file format (.ini [users] config)."); private static final String HEX_PREFIX = "0x"; private static final String DEFAULT_ALGORITHM_NAME = "MD5"; private static final int DEFAULT_GENERATED_SALT_SIZE = 128; private static final int DEFAULT_NUM_ITERATIONS = 1; private static final String SALT_MUTEX_MSG = createMutexMessage(SALT, SALT_BYTES); static { ALGORITHM.setArgName("name"); SALT_GEN_SIZE.setArgName("numBits"); ITERATIONS.setArgName("num"); SALT.setArgName("sval"); SALT_BYTES.setArgName("encTxt"); } public static void main(String[] args) { CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption(HELP).addOption(DEBUG).addOption(ALGORITHM).addOption(HEX).addOption(ITERATIONS); options.addOption(RESOURCE).addOption(PASSWORD).addOption(PASSWORD_NC); options.addOption(SALT).addOption(SALT_BYTES).addOption(SALT_GEN).addOption(SALT_GEN_SIZE).addOption(SALT_GEN_HEX); options.addOption(NO_FORMAT).addOption(SHIRO); boolean debug = false; String algorithm = DEFAULT_ALGORITHM_NAME; int iterations = DEFAULT_NUM_ITERATIONS; boolean base64 = true; boolean resource = false; boolean password = false; boolean passwordConfirm = true; String saltString = null; String saltBytesString = null; boolean generateSalt = false; boolean generatedSaltBase64 = true; int generatedSaltSize = DEFAULT_GENERATED_SALT_SIZE; boolean shiroFormat = false; boolean format = true; char[] passwordChars = null; try { CommandLine line = parser.parse(options, args); if (line.hasOption(HELP.getOpt())) { printHelpAndExit(options, null, debug, 0); } if (line.hasOption(DEBUG.getOpt())) { debug = true; } if (line.hasOption(ALGORITHM.getOpt())) { algorithm = line.getOptionValue(ALGORITHM.getOpt()); } if (line.hasOption(ITERATIONS.getOpt())) { iterations = getRequiredPositiveInt(line, ITERATIONS); } if (line.hasOption(HEX.getOpt())) { base64 = false; } if (line.hasOption(PASSWORD.getOpt())) { password = true; } if (line.hasOption(RESOURCE.getOpt())) { resource = true; } if (line.hasOption(PASSWORD_NC.getOpt())) { password = true; passwordConfirm = false; } if (line.hasOption(SALT.getOpt())) { saltString = line.getOptionValue(SALT.getOpt()); } if (line.hasOption(SALT_BYTES.getOpt())) { saltBytesString = line.getOptionValue(SALT_BYTES.getOpt()); } if (line.hasOption(SALT_GEN.getOpt())) { generateSalt = true; } if (line.hasOption(SALT_GEN_HEX.getOpt())) { generateSalt = true; generatedSaltBase64 = false; } if (line.hasOption(SALT_GEN_SIZE.getOpt())) { generateSalt = true; generatedSaltSize = getRequiredPositiveInt(line, SALT_GEN_SIZE); if (generatedSaltSize % 8 != 0) { throw new IllegalArgumentException("Generated salt size must be a multiple of 8 (e.g. 
128, 192, 256, 512, etc)."); } } if (line.hasOption(NO_FORMAT.getOpt())) { format = false; } if (line.hasOption(SHIRO.getOpt())) { shiroFormat = true; } String sourceValue = null; Object source; if (password) { passwordChars = readPassword(passwordConfirm); source = passwordChars; } else { String[] remainingArgs = line.getArgs(); if (remainingArgs == null || remainingArgs.length != 1) { printHelpAndExit(options, null, debug, -1); } assert remainingArgs != null; sourceValue = toString(remainingArgs); if (resource) { if (!ResourceUtils.hasResourcePrefix(sourceValue)) { source = toFile(sourceValue); } else { source = ResourceUtils.getInputStreamForPath(sourceValue); } } else { source = sourceValue; } } ByteSource salt = getSalt(saltString, saltBytesString, generateSalt, generatedSaltSize); SimpleHash hash = new SimpleHash(algorithm, source, salt, iterations); StringBuilder output; if (shiroFormat) { output = formatForShiroIni(hash, base64, salt, generatedSaltBase64, generateSalt); } else if (format) { output = format(hash, base64, salt, generatedSaltBase64, generateSalt, algorithm, sourceValue); } else { output = formatMinimal(hash, base64, salt, generatedSaltBase64, generateSalt); } System.out.println(output); } catch (IllegalArgumentException iae) { exit(iae, debug); } catch (UnknownAlgorithmException uae) { exit(uae, debug); } catch (IOException ioe) { exit(ioe, debug); } catch (Exception e) { printHelpAndExit(options, e, debug, -1); } finally { if (passwordChars != null && passwordChars.length > 0) { for (int i = 0; i < passwordChars.length; i++) { passwordChars[i] = ' '; } } } } private static String createMutexMessage(Option... options) { StringBuilder sb = new StringBuilder(); sb.append("The "); for (int i = 0; i < options.length; i++) { if (i > 0) { sb.append(", "); } Option o = options[0]; sb.append("-").append(o.getOpt()).append("/--").append(o.getLongOpt()); } sb.append(" and generated salt options are mutually exclusive. 
Only one of them may be used at a time"); return sb.toString(); } private static void exit(Exception e, boolean debug) { printException(e, debug); System.exit(-1); } private static StringBuilder format(ByteSource hash, boolean hashBase64, ByteSource salt, boolean saltBase64, boolean showSalt, String alg, String value) { StringBuilder sb = new StringBuilder(); sb.append(alg).append("(").append(value).append(")"); if (hashBase64) { sb.append(" base64 = ").append(hash.toBase64()); } else { sb.append(" hex = ").append(hash.toHex()); } if (showSalt && salt != null) { sb.append("\nGenerated salt"); if (saltBase64) { sb.append(" base64 = ").append(salt.toBase64()); } else { sb.append(" hex = ").append(salt.toHex()); } } return sb; } private static StringBuilder formatForShiroIni(ByteSource hash, boolean hashBase64, ByteSource salt, boolean saltBase64, boolean showSalt) { StringBuilder sb = new StringBuilder(); if (hashBase64) { sb.append(hash.toBase64()); } else { //hex: sb.append(HEX_PREFIX).append(hash.toHex()); } if (showSalt && salt != null) { sb.append(" "); if (saltBase64) { sb.append(salt.toBase64()); } else { //hex: sb.append(HEX_PREFIX).append(salt.toHex()); } } return sb; } private static StringBuilder formatMinimal(ByteSource hash, boolean hashBase64, ByteSource salt, boolean saltBase64, boolean showSalt) { StringBuilder sb = new StringBuilder(); if (hashBase64) { sb.append(hash.toBase64()); } else { sb.append(hash.toHex()); } if (showSalt && salt != null) { sb.append(" "); if (saltBase64) { sb.append(salt.toBase64()); } else { sb.append(salt.toHex()); } } return sb; } private static int getRequiredPositiveInt(CommandLine line, Option option) { String iterVal = line.getOptionValue(option.getOpt()); try { return Integer.parseInt(iterVal); } catch (NumberFormatException e) { String msg = "'" + option.getLongOpt() + "' value must be a positive integer."; throw new IllegalArgumentException(msg, e); } } private static ByteSource getSalt(String saltString, String saltBytesString, boolean generateSalt, int generatedSaltSize) { if (saltString != null) { if (generateSalt || (saltBytesString != null)) { throw new IllegalArgumentException(SALT_MUTEX_MSG); } return ByteSource.Util.bytes(saltString); } if (saltBytesString != null) { if (generateSalt) { throw new IllegalArgumentException(SALT_MUTEX_MSG); } String value = saltBytesString; boolean base64 = true; if (saltBytesString.startsWith(HEX_PREFIX)) { //hex: base64 = false; value = value.substring(HEX_PREFIX.length()); } byte[] bytes; if (base64) { bytes = Base64.decode(value); } else { bytes = Hex.decode(value); } return ByteSource.Util.bytes(bytes); } if (generateSalt) { SecureRandomNumberGenerator generator = new SecureRandomNumberGenerator(); int byteSize = generatedSaltSize / 8; //generatedSaltSize is in *bits* - convert to byte size: return generator.nextBytes(byteSize); } //no salt used: return null; } private static void printException(Exception e, boolean debug) { if (e != null) { System.out.println(); if (debug) { System.out.println("Error: "); e.printStackTrace(System.out); System.out.println(e.getMessage()); } else { System.out.println("Error: " + e.getMessage()); System.out.println(); System.out.println("Specify -d or --debug for more information."); } } } private static void printHelp(Options options, Exception e, boolean debug) { HelpFormatter help = new HelpFormatter(); String command = "java -jar shiro-tools-hasher-<version>.jar [options] [<value>]"; String header = "\nPrint a cryptographic hash (aka message digest) of the 
specified <value>.\n--\nOptions:"; String footer = "\n" + "<value> is optional only when hashing passwords (see below). It is\n" + "required all other times." + "\n\n" + "Password Hashing:\n" + "---------------------------------\n" + "Specify the -p/--password option and DO NOT enter a <value>. You will\n" + "be prompted for a password and characters will not echo as you type." + "\n\n" + "Salting:\n" + "---------------------------------\n" + "Specifying a salt:" + "\n\n" + "You may specify a salt using the -s/--salt option followed by the salt\n" + "value. If the salt value is a base64 or hex string representing a\n" + "byte array, you must specify the -sb/--saltbytes option to indicate this,\n" + "otherwise the text value bytes will be used directly." + "\n\n" + "When using -sb/--saltbytes, the -s/--salt value is expected to be a\n" + "base64-encoded string by default. If the value is a hex-encoded string,\n" + "you must prefix the string with 0x (zero x) to indicate a hex value." + "\n\n" + "Generating a salt:" + "\n\n" + "Use the -gs/--gensalt option if you don't want to specify a salt,\n" + "but want a strong random salt to be generated and used during hashing.\n" + "The generated salt size defaults to 128 bits. You may specify\n" + "a different size by using the -gss/--gensaltsize option followed by\n" + "a positive integer." + "\n\n" + "Because a salt must be specified if computing the\n" + "hash later, generated salts will be printed, defaulting to base64\n" + "encoding. If you prefer to use hex encoding, additionally use the\n" + "-gsh/--gensalthex option." + "\n\n" + "Files, URLs and classpath resources:\n" + "---------------------------------\n" + "If using the -r/--resource option, the <value> represents a resource path.\n" + "By default this is expected to be a file path, but you may specify\n" + "classpath or URL resources by using the classpath: or url: prefix\n" + "respectively." + "\n\n" + "Some examples:" + "\n\n" + "<command> -r fileInCurrentDirectory.txt\n" + "<command> -r ../../relativePathFile.xml\n" + "<command> -r ~/documents/myfile.pdf\n" + "<command> -r /usr/local/logs/absolutePathFile.log\n" + "<command> -r url:http://foo.com/page.html\n" + "<command> -r classpath:/WEB-INF/lib/something.jar"; printException(e, debug); System.out.println(); help.printHelp(command, header, options, null); System.out.println(footer); } private static void printHelpAndExit(Options options, Exception e, boolean debug, int exitCode) { printHelp(options, e, debug); System.exit(exitCode); } private static char[] readPassword(boolean confirm) { if (!JavaEnvironment.isAtLeastVersion16()) { String msg = "Password hashing (prompt without echo) uses the java.io.Console to read passwords " + "safely. This is only available on Java 1.6 platforms and later."; throw new IllegalArgumentException(msg); } java.io.Console console = System.console(); if (console == null) { throw new IllegalStateException("java.io.Console is not available on the current JVM. 
Cannot read passwords."); } char[] first = console.readPassword("%s", "Password to hash: "); if (first == null || first.length == 0) { throw new IllegalArgumentException("No password specified."); } if (confirm) { char[] second = console.readPassword("%s", "Password to hash (confirm): "); if (!Arrays.equals(first, second)) { String msg = "Password entries do not match."; throw new IllegalArgumentException(msg); } } return first; } private static File toFile(String path) { String resolved = path; if (path.startsWith("~/") || path.startsWith(("~\\"))) { resolved = path.replaceFirst("\\~", System.getProperty("user.home")); } return new File(resolved); } private static String toString(String[] strings) { int len = strings != null ? strings.length : 0; if (len == 0) { return null; } return StringUtils.toDelimitedString(strings, " "); } }
tools/hasher/src/main/java/org/apache/shiro/tools/hasher/Hasher.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.shiro.tools.hasher; import org.apache.commons.cli.*; import org.apache.shiro.codec.Base64; import org.apache.shiro.codec.Hex; import org.apache.shiro.crypto.SecureRandomNumberGenerator; import org.apache.shiro.crypto.UnknownAlgorithmException; import org.apache.shiro.crypto.hash.SimpleHash; import org.apache.shiro.io.ResourceUtils; import org.apache.shiro.util.ByteSource; import org.apache.shiro.util.JavaEnvironment; import org.apache.shiro.util.StringUtils; import java.io.File; import java.io.IOException; import java.util.Arrays; /** * Commandline line utility to hash data such as strings, passwords, resources (files, urls, etc). * <p/> * Usage: * <pre> * java -jar shiro-tools-hasher<em>-version</em>-cli.jar * </pre> * This will print out all supported options with documentation. * * @since 1.2 */ public final class Hasher { private static final Option ALGORITHM = new Option("a", "algorithm", true, "hash algorithm name. Defaults to MD5."); private static final Option DEBUG = new Option("d", "debug", false, "show additional error (stack trace) information."); private static final Option HELP = new Option("help", "help", false, "show this help message."); private static final Option HEX = new Option("h", "hex", false, "display a hex value instead of Base64."); private static final Option ITERATIONS = new Option("i", "iterations", true, "number of hash iterations. Defaults to 1."); private static final Option NO_FORMAT = new Option("nf", "noformat", false, "turn off output formatting. Any generated salt will be placed after the hash separated by a space."); private static final Option PASSWORD = new Option("p", "password", false, "hash a password (disable typing echo)"); private static final Option PASSWORD_NC = new Option("pnc", "pnoconfirm", false, "disable password hash confirmation prompt."); private static final Option RESOURCE = new Option("r", "resource", false, "read and hash the resource located at <value>. See below for more information."); private static final Option SALT = new Option("s", "salt", true, "use the specified salt. <arg> is plaintext."); private static final Option SALT_BYTES = new Option("sb", "saltbytes", true, "use the specified salt bytes. <arg> is hex or base64 encoded text."); private static final Option SALT_GEN = new Option("gs", "gensalt", false, "generate and use a random salt."); private static final Option SALT_GEN_HEX = new Option("gsh", "gensalthex", false, "display the generated salt's hex value instead of Base64."); private static final Option SALT_GEN_SIZE = new Option("gss", "gensaltsize", true, "the number of salt bits (not bytes!) to generate. 
Defaults to 128."); private static final Option SHIRO = new Option("shiro", "shiro", false, "display output in the Shiro password file format (.ini [users] config)."); private static final String HEX_PREFIX = "0x"; private static final String DEFAULT_ALGORITHM_NAME = "MD5"; private static final int DEFAULT_GENERATED_SALT_SIZE = 128; private static final int DEFAULT_NUM_ITERATIONS = 1; private static final String SALT_MUTEX_MSG = createMutexMessage(SALT, SALT_BYTES); public static void main(String[] args) { CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption(HELP).addOption(DEBUG).addOption(ALGORITHM).addOption(HEX).addOption(ITERATIONS); options.addOption(RESOURCE).addOption(PASSWORD).addOption(PASSWORD_NC); options.addOption(SALT).addOption(SALT_BYTES).addOption(SALT_GEN).addOption(SALT_GEN_SIZE).addOption(SALT_GEN_HEX); options.addOption(NO_FORMAT).addOption(SHIRO); boolean debug = false; String algorithm = DEFAULT_ALGORITHM_NAME; int iterations = DEFAULT_NUM_ITERATIONS; boolean base64 = true; boolean resource = false; boolean password = false; boolean passwordConfirm = true; String saltString = null; String saltBytesString = null; boolean generateSalt = false; boolean generatedSaltBase64 = true; int generatedSaltSize = DEFAULT_GENERATED_SALT_SIZE; boolean shiroFormat = false; boolean format = true; char[] passwordChars = null; try { CommandLine line = parser.parse(options, args); if (line.hasOption(HELP.getOpt())) { printHelpAndExit(options, null, debug, 0); } if (line.hasOption(DEBUG.getOpt())) { debug = true; } if (line.hasOption(ALGORITHM.getOpt())) { algorithm = line.getOptionValue(ALGORITHM.getOpt()); } if (line.hasOption(ITERATIONS.getOpt())) { iterations = getRequiredPositiveInt(line, ITERATIONS); } if (line.hasOption(HEX.getOpt())) { base64 = false; } if (line.hasOption(PASSWORD.getOpt())) { password = true; } if (line.hasOption(RESOURCE.getOpt())) { resource = true; } if (line.hasOption(PASSWORD_NC.getOpt())) { passwordConfirm = false; } if (line.hasOption(SALT.getOpt())) { saltString = line.getOptionValue(SALT.getOpt()); } if (line.hasOption(SALT_BYTES.getOpt())) { saltBytesString = line.getOptionValue(SALT_BYTES.getOpt()); } if (line.hasOption(SALT_GEN.getOpt())) { generateSalt = true; } if (line.hasOption(SALT_GEN_HEX.getOpt())) { generateSalt = true; generatedSaltBase64 = false; } if (line.hasOption(SALT_GEN_SIZE.getOpt())) { generateSalt = true; generatedSaltSize = getRequiredPositiveInt(line, SALT_GEN_SIZE); if (generatedSaltSize % 8 != 0) { throw new IllegalArgumentException("Generated salt size must be a multiple of 8 (e.g. 
128, 192, 256, 512, etc)."); } } if (line.hasOption(NO_FORMAT.getOpt())) { format = false; } if (line.hasOption(SHIRO.getOpt())) { shiroFormat = true; } String sourceValue = null; Object source; if (password) { passwordChars = readPassword(passwordConfirm); source = passwordChars; } else { String[] remainingArgs = line.getArgs(); if (remainingArgs == null || remainingArgs.length != 1) { printHelpAndExit(options, null, debug, -1); } assert remainingArgs != null; sourceValue = toString(remainingArgs); if (resource) { if (!ResourceUtils.hasResourcePrefix(sourceValue)) { source = toFile(sourceValue); } else { source = ResourceUtils.getInputStreamForPath(sourceValue); } } else { source = sourceValue; } } ByteSource salt = getSalt(saltString, saltBytesString, generateSalt, generatedSaltSize); SimpleHash hash = new SimpleHash(algorithm, source, salt, iterations); StringBuilder output; if (shiroFormat) { output = formatForShiroIni(hash, base64, salt, generatedSaltBase64, generateSalt); } else if (format) { output = format(hash, base64, salt, generatedSaltBase64, generateSalt, algorithm, sourceValue); } else { output = formatMinimal(hash, base64, salt, generatedSaltBase64, generateSalt); } System.out.println(output); } catch (IllegalArgumentException iae) { exit(iae, debug); } catch (UnknownAlgorithmException uae) { exit(uae, debug); } catch (IOException ioe) { exit(ioe, debug); } catch (Exception e) { printHelpAndExit(options, e, debug, -1); } finally { if (passwordChars != null && passwordChars.length > 0) { for (int i = 0; i < passwordChars.length; i++) { passwordChars[i] = ' '; } } } } private static String createMutexMessage(Option... options) { StringBuilder sb = new StringBuilder(); sb.append("The "); for (int i = 0; i < options.length; i++) { if (i > 0) { sb.append(", "); } Option o = options[0]; sb.append("-").append(o.getOpt()).append("/--").append(o.getLongOpt()); } sb.append(" and generated salt options are mutually exclusive. 
Only one of them may be used at a time"); return sb.toString(); } private static void exit(Exception e, boolean debug) { printException(e, debug); System.exit(-1); } private static StringBuilder format(ByteSource hash, boolean hashBase64, ByteSource salt, boolean saltBase64, boolean showSalt, String alg, String value) { StringBuilder sb = new StringBuilder(); sb.append(alg).append("(").append(value).append(")"); if (hashBase64) { sb.append(" base64 = ").append(hash.toBase64()); } else { sb.append(" hex = ").append(hash.toHex()); } if (showSalt && salt != null) { sb.append("\nGenerated salt"); if (saltBase64) { sb.append(" base64 = ").append(salt.toBase64()); } else { sb.append(" hex = ").append(salt.toHex()); } } return sb; } private static StringBuilder formatForShiroIni(ByteSource hash, boolean hashBase64, ByteSource salt, boolean saltBase64, boolean showSalt) { StringBuilder sb = new StringBuilder(); if (hashBase64) { sb.append(hash.toBase64()); } else { //hex: sb.append(HEX_PREFIX).append(hash.toHex()); } if (showSalt && salt != null) { sb.append(" "); if (saltBase64) { sb.append(salt.toBase64()); } else { //hex: sb.append(HEX_PREFIX).append(salt.toHex()); } } return sb; } private static StringBuilder formatMinimal(ByteSource hash, boolean hashBase64, ByteSource salt, boolean saltBase64, boolean showSalt) { StringBuilder sb = new StringBuilder(); if (hashBase64) { sb.append(hash.toBase64()); } else { sb.append(hash.toHex()); } if (showSalt && salt != null) { sb.append(" "); if (saltBase64) { sb.append(salt.toBase64()); } else { sb.append(salt.toHex()); } } return sb; } private static int getRequiredPositiveInt(CommandLine line, Option option) { String iterVal = line.getOptionValue(option.getOpt()); try { return Integer.parseInt(iterVal); } catch (NumberFormatException e) { String msg = "'" + option.getLongOpt() + "' value must be a positive integer."; throw new IllegalArgumentException(msg, e); } } private static ByteSource getSalt(String saltString, String saltBytesString, boolean generateSalt, int generatedSaltSize) { if (saltString != null) { if (generateSalt || (saltBytesString != null)) { throw new IllegalArgumentException(SALT_MUTEX_MSG); } return ByteSource.Util.bytes(saltString); } if (saltBytesString != null) { if (generateSalt) { throw new IllegalArgumentException(SALT_MUTEX_MSG); } String value = saltBytesString; boolean base64 = true; if (saltBytesString.startsWith(HEX_PREFIX)) { //hex: base64 = false; value = value.substring(HEX_PREFIX.length()); } byte[] bytes; if (base64) { bytes = Base64.decode(value); } else { bytes = Hex.decode(value); } return ByteSource.Util.bytes(bytes); } if (generateSalt) { SecureRandomNumberGenerator generator = new SecureRandomNumberGenerator(); int byteSize = generatedSaltSize / 8; //generatedSaltSize is in *bits* - convert to byte size: return generator.nextBytes(byteSize); } //no salt used: return null; } private static void printException(Exception e, boolean debug) { if (e != null) { System.out.println(); if (debug) { System.out.println("Error: "); e.printStackTrace(System.out); System.out.println(e.getMessage()); } else { System.out.println("Error: " + e.getMessage()); System.out.println(); System.out.println("Specify -d or --debug for more information."); } } } private static void printHelp(Options options, Exception e, boolean debug) { HelpFormatter help = new HelpFormatter(); String command = "java -jar shiro-tools-hasher-<version>.jar [options] [<value>]"; String header = "\nPrint a cryptographic hash (aka message digest) of the 
specified <value>.\n--\nOptions:"; String footer = "\n" + "<value> is optional only when hashing passwords (see below). It is\n" + "required all other times." + "\n\n" + "Password Hashing:\n" + "---------------------------------\n" + "Specify the -p/--password option and DO NOT enter a <value>. You will\n" + "be prompted for a password and characters will not echo as you type." + "\n\n" + "Salting:\n" + "---------------------------------\n" + "Specifying a salt:" + "\n\n" + "You may specify a salt using the -s/--salt option followed by the salt\n" + "value. If the salt value is a base64 or hex string representing a\n" + "byte array, you must specify the -sb/--saltbytes option to indicate this,\n" + "otherwise the text value bytes will be used directly." + "\n\n" + "When using -sb/--saltbytes, the -s/--salt value is expected to be a\n" + "base64-encoded string by default. If the value is a hex-encoded string,\n" + "you must prefix the string with 0x (zero x) to indicate a hex value." + "\n\n" + "Generating a salt:" + "\n\n" + "Use the -gs/--gensalt option if you don't want to specify a salt,\n" + "but want a strong random salt to be generated and used during hashing.\n" + "The generated salt size defaults to 128 bits. You may specify\n" + "a different size by using the -gss/--gensaltsize option followed by\n" + "a positive integer." + "\n\n" + "Because a salt must be specified if computing the\n" + "hash later, generated salts will be printed, defaulting to base64\n" + "encoding. If you prefer to use hex encoding, additionally use the\n" + "-gsh/--gensalthex option." + "\n\n" + "Files, URLs and classpath resources:\n" + "---------------------------------\n" + "If using the -r/--resource option, the <value> represents a resource path.\n" + "By default this is expected to be a file path, but you may specify\n" + "classpath or URL resources by using the classpath: or url: prefix\n" + "respectively." + "\n\n" + "Some examples:" + "\n\n" + "<command> -r fileInCurrentDirectory.txt\n" + "<command> -r ../../relativePathFile.xml\n" + "<command> -r ~/documents/myfile.pdf\n" + "<command> -r /usr/local/logs/absolutePathFile.log\n" + "<command> -r url:http://foo.com/page.html\n" + "<command> -r classpath:/WEB-INF/lib/something.jar"; printException(e, debug); System.out.println(); help.printHelp(command, header, options, null); System.out.println(footer); } private static void printHelpAndExit(Options options, Exception e, boolean debug, int exitCode) { printHelp(options, e, debug); System.exit(exitCode); } private static char[] readPassword(boolean confirm) { if (!JavaEnvironment.isAtLeastVersion16()) { String msg = "Password hashing (prompt without echo) uses the java.io.Console to read passwords " + "safely. This is only available on Java 1.6 platforms and later."; throw new IllegalArgumentException(msg); } java.io.Console console = System.console(); if (console == null) { throw new IllegalStateException("java.io.Console is not available on the current JVM. 
Cannot read passwords."); } char[] first = console.readPassword("%s", "Password to hash: "); if (first == null || first.length == 0) { throw new IllegalArgumentException("No password specified."); } if (confirm) { char[] second = console.readPassword("%s", "Password to hash (confirm): "); if (!Arrays.equals(first, second)) { String msg = "Password entries do not match."; throw new IllegalArgumentException(msg); } } return first; } private static File toFile(String path) { String resolved = path; if (path.startsWith("~/") || path.startsWith(("~\\"))) { resolved = path.replaceFirst("\\~", System.getProperty("user.home")); } return new File(resolved); } private static String toString(String[] strings) { int len = strings != null ? strings.length : 0; if (len == 0) { return null; } return StringUtils.toDelimitedString(strings, " "); } static { ALGORITHM.setArgName("name"); SALT_GEN_SIZE.setArgName("numBits"); ITERATIONS.setArgName("num"); SALT.setArgName("sval"); SALT_BYTES.setArgName("encTxt"); } }
SHIRO-279: Minor alterations. Needs to be updated to support a Modular Crypt Format (http://packages.python.org/passlib/modular_crypt_format.html, http://docstore.mik.ua/orelly/other/puis3rd/0596003234_puis3-chp-4-sect-3.html) git-svn-id: 33e7a68b83922cb39bd2fd584a64617217729dea@1139964 13f79535-47bb-0310-9956-ffa450edef68
tools/hasher/src/main/java/org/apache/shiro/tools/hasher/Hasher.java
SHIRO-279: Minor alterations. Needs to be updated to support a Modular Crypt Format (http://packages.python.org/passlib/modular_crypt_format.html, http://docstore.mik.ua/orelly/other/puis3rd/0596003234_puis3-chp-4-sect-3.html)
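The Modular Crypt Format mentioned in the commit message packs an identifier, algorithm parameters, salt, and digest into one self-describing token. A minimal sketch of producing such a token with the SimpleHash API Hasher already uses; the $shiro1$ identifier and field order here are illustrative assumptions, not something this commit implements:
import org.apache.shiro.crypto.SecureRandomNumberGenerator;
import org.apache.shiro.crypto.hash.SimpleHash;
import org.apache.shiro.util.ByteSource;

public class McfSketch {
    public static void main(String[] args) {
        ByteSource salt = new SecureRandomNumberGenerator().nextBytes(16);
        SimpleHash hash = new SimpleHash("SHA-256", "secret", salt, 500000);
        // Hypothetical MCF-style layout: $identifier$algorithm$iterations$salt$digest
        String token = "$shiro1$SHA-256$500000$" + salt.toBase64() + "$" + hash.toBase64();
        System.out.println(token);
    }
}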
Java
apache-2.0
77f263b878ba0fc280c9e405e3ea08356b676c7b
0
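For orientation, a minimal programmatic sketch of what an invocation like java -jar shiro-tools-hasher-cli.jar -a SHA-256 -gs -i 1024 <value> computes, using only classes Hasher itself imports (the input string is illustrative):
import org.apache.shiro.crypto.SecureRandomNumberGenerator;
import org.apache.shiro.crypto.hash.SimpleHash;
import org.apache.shiro.util.ByteSource;

public class HasherEquivalentSketch {
    public static void main(String[] args) {
        // -gs: generate a random salt; the default size is 128 bits = 16 bytes
        ByteSource salt = new SecureRandomNumberGenerator().nextBytes(128 / 8);
        // -a SHA-256 -i 1024: hash the value with the salt for 1024 iterations
        SimpleHash hash = new SimpleHash("SHA-256", "some value", salt, 1024);
        // -nf/--noformat style output: hash, then the generated salt, space separated
        System.out.println(hash.toBase64() + " " + salt.toBase64());
    }
}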
AmritShokar/cordova-network-info-plugin
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.cordova.networkinformation; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.LOG; import org.apache.cordova.PluginResult; import org.apache.cordova.CordovaWebView; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.net.ConnectivityManager; import android.net.Network; import android.net.NetworkCapabilities; import android.net.NetworkInfo; import android.net.NetworkRequest; import android.net.wifi.WifiConfiguration; import android.net.wifi.WifiManager; import android.os.Build; import android.os.Handler; import android.util.Log; import android.widget.Toast; import java.lang.reflect.Method; import java.util.Locale; public class NetworkManager extends CordovaPlugin { public static int NOT_REACHABLE = 0; public static int REACHABLE_VIA_CARRIER_DATA_NETWORK = 1; public static int REACHABLE_VIA_WIFI_NETWORK = 2; public static final String WIFI = "wifi"; public static final String WIMAX = "wimax"; // mobile public static final String MOBILE = "mobile"; // Android L calls this Cellular, because I have no idea! 
public static final String CELLULAR = "cellular"; // 2G network types public static final String TWO_G = "2g"; public static final String GSM = "gsm"; public static final String GPRS = "gprs"; public static final String EDGE = "edge"; // 3G network types public static final String THREE_G = "3g"; public static final String CDMA = "cdma"; public static final String UMTS = "umts"; public static final String HSPA = "hspa"; public static final String HSUPA = "hsupa"; public static final String HSDPA = "hsdpa"; public static final String ONEXRTT = "1xrtt"; public static final String EHRPD = "ehrpd"; // 4G network types public static final String FOUR_G = "4g"; public static final String LTE = "lte"; public static final String UMB = "umb"; public static final String HSPA_PLUS = "hspa+"; // return type public static final String TYPE_UNKNOWN = "unknown"; public static final String TYPE_ETHERNET = "ethernet"; public static final String TYPE_ETHERNET_SHORT = "eth"; public static final String TYPE_WIFI = "wifi"; public static final String TYPE_2G = "2g"; public static final String TYPE_3G = "3g"; public static final String TYPE_4G = "4g"; public static final String TYPE_NONE = "none"; private static final String LOG_TAG = "NetworkManager"; private CallbackContext connectionCallbackContext; ConnectivityManager sockMan; BroadcastReceiver receiver; private JSONObject lastInfo = null; WifiManager wifiMan; Handler handler; // Initiated when there is no cellular present private boolean mobileDataEnabled = true; // Assume enabled private boolean handlerCheckEnabled = false; // Indicates when to use handler private boolean prevWifiState; // State of WiFi private boolean handlerCellCheckEnabled = true; // Restart handler after disabling WiFi so cellular status is updated private boolean satDisabled = true; // Custom configuration settings private final String satSSID = "YodelMe"; // Satellite SSID private final int handlerDelay = 60000; // Run handler every 5 mins private static final String SWITCH_TAG = "WiFiPreference"; /** * Sets the context of the Command. This can then be used to do things like * get file paths associated with the Activity. * * @param cordova The context of the main Activity. * @param webView The CordovaWebView Cordova is running in. 
*/ public void initialize(CordovaInterface cordova, CordovaWebView webView) { super.initialize(cordova, webView); this.sockMan = (ConnectivityManager) cordova.getActivity().getSystemService(Context.CONNECTIVITY_SERVICE); this.connectionCallbackContext = null; this.wifiMan = (WifiManager) cordova.getActivity().getApplicationContext().getSystemService(Context.WIFI_SERVICE); // We need to listen to connectivity events to update navigator.connection IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); if (this.receiver == null) { this.receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { // (The null check is for the ARM Emulator, please use Intel Emulator for better results) if(NetworkManager.this.webView != null) updateConnectionInfo(sockMan.getActiveNetworkInfo()); } }; webView.getContext().registerReceiver(this.receiver, intentFilter); } // Initially disable all satellite Access Points for (WifiConfiguration preconfigDisInit: wifiMan.getConfiguredNetworks()) { if (preconfigDisInit.SSID.contains(satSSID)) { //Log.d(SWITCH_TAG, "1) disabling "+preconfigDisInit.SSID); wifiMan.disableNetwork(preconfigDisInit.networkId); } } satDisabled = true; // satellites are initally disabled; // Initialize timed handler which checks for cellular network availability handler = new Handler(); handler.postDelayed(runnable, handlerDelay); } /** * Executes the request and returns PluginResult. * * @param action The action to execute. * @param args JSONArry of arguments for the plugin. * @param callbackContext The callback id used when calling back into JavaScript. * @return True if the action was valid, false otherwise. */ public boolean execute(String action, JSONArray args, CallbackContext callbackContext) { if (action.equals("getConnectionInfo")) { this.connectionCallbackContext = callbackContext; NetworkInfo info = sockMan.getActiveNetworkInfo(); String connectionType = ""; try { connectionType = this.getConnectionInfo(info).get("type").toString(); } catch (JSONException e) { LOG.d(LOG_TAG, e.getLocalizedMessage()); } PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, connectionType); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); return true; } return false; } /** * Stop network receiver. 
*/ public void onDestroy() { //wifiMan.removeNetwork(satNetId); if (this.receiver != null) { try { webView.getContext().unregisterReceiver(this.receiver); } catch (Exception e) { LOG.e(LOG_TAG, "Error unregistering network receiver: " + e.getMessage(), e); } finally { receiver = null; } } } //-------------------------------------------------------------------------- // LOCAL METHODS //-------------------------------------------------------------------------- /** * Updates the JavaScript side whenever the connection changes * * @param info the current active network info * @return */ private void updateConnectionInfo(NetworkInfo info) { //Log.d("WifiPreference","SEE THIS: updateConnectionInfo just called"); Toast.makeText(cordova.getActivity().getApplicationContext(), "updateConnectionInfo() just called", Toast.LENGTH_SHORT).show(); // send update to javascript "navigator.network.connection" // Jellybean sends its own info JSONObject thisInfo = this.getConnectionInfo(info); if(!thisInfo.equals(lastInfo)) { String connectionType = ""; try { connectionType = thisInfo.get("type").toString(); } catch (JSONException e) { LOG.d(LOG_TAG, e.getLocalizedMessage()); } sendUpdate(connectionType); lastInfo = thisInfo; } //Log.d("WifiPreference", "HandlerCheckEnabled: "+handlerCheckEnabled); // Check current state of cellular connection if (!handlerCheckEnabled) { // First Check if cellular is enabled in settings if (checkCellularEnabled()) { // Check if cellular data is suspended (loss of cellular network) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { for (Network networkInfo : sockMan.getAllNetworks()) { NetworkInfo i = sockMan.getNetworkInfo(networkInfo); Log.d(SWITCH_TAG, "Network Info: " + i); if (i != null) { if (i.getType() == sockMan.TYPE_MOBILE && i.getState() == NetworkInfo.State.SUSPENDED) { Log.d(SWITCH_TAG, "This network is mobile and suspended"); mobileDataEnabled = false; } } } } } else { mobileDataEnabled = false; } //Toast.makeText(cordova.getActivity().getApplicationContext(), "updateConnectionInfo() mobileDataEnabled: "+mobileDataEnabled, Toast.LENGTH_SHORT).show(); //Log.d("WifiPreference", "updateConnectionInfo: MobileDataEnabled: "+mobileDataEnabled); // Run handler logic when standard WiFi not active or when mobile data not available if (!mobileDataEnabled) { // Enable handler or keep handler running handlerCheckEnabled = true; //Log.d("WifiPreference", "handlerCheck was just renabled"); } } } /** * Get the latest network connection information * * @param info the current active network info * @return a JSONObject that represents the network info */ private JSONObject getConnectionInfo(NetworkInfo info) { String type = TYPE_NONE; String extraInfo = ""; if (info != null) { // If we are not connected to any network set type to none if (!info.isConnected()) { type = TYPE_NONE; } else { type = getType(info); } extraInfo = info.getExtraInfo(); } LOG.d(LOG_TAG, "Connection Type: " + type); LOG.d(LOG_TAG, "Connection Extra Info: " + extraInfo); JSONObject connectionInfo = new JSONObject(); try { connectionInfo.put("type", type); connectionInfo.put("extraInfo", extraInfo); } catch (JSONException e) { LOG.d(LOG_TAG, e.getLocalizedMessage()); } return connectionInfo; } /** * Create a new plugin result and send it back to JavaScript * * @param type the network info to set as navigator.connection */ private void sendUpdate(String type) { if (connectionCallbackContext != null) { PluginResult result = new PluginResult(PluginResult.Status.OK, type); result.setKeepCallback(true); 
connectionCallbackContext.sendPluginResult(result); } webView.postMessage("networkconnection", type); } /** * Determine the type of connection * * @param info the network info so we can determine connection type. * @return the type of mobile network we are on */ private String getType(NetworkInfo info) { if (info != null) { String type = info.getTypeName().toLowerCase(Locale.US); LOG.d(LOG_TAG, "toLower : " + type.toLowerCase()); LOG.d(LOG_TAG, "wifi : " + WIFI); if (type.equals(WIFI)) { return TYPE_WIFI; } else if (type.toLowerCase().equals(TYPE_ETHERNET) || type.toLowerCase().startsWith(TYPE_ETHERNET_SHORT)) { return TYPE_ETHERNET; } else if (type.equals(MOBILE) || type.equals(CELLULAR)) { type = info.getSubtypeName().toLowerCase(Locale.US); if (type.equals(GSM) || type.equals(GPRS) || type.equals(EDGE) || type.equals(TWO_G)) { return TYPE_2G; } else if (type.startsWith(CDMA) || type.equals(UMTS) || type.equals(ONEXRTT) || type.equals(EHRPD) || type.equals(HSUPA) || type.equals(HSDPA) || type.equals(HSPA) || type.equals(THREE_G)) { return TYPE_3G; } else if (type.equals(LTE) || type.equals(UMB) || type.equals(HSPA_PLUS) || type.equals(FOUR_G)) { return TYPE_4G; } } } else { return TYPE_NONE; } return TYPE_UNKNOWN; } /** * Timed runnable method for periodic cellular availability checks. * When cellular data is absent, handler method will periodicely attempt to restablish cellular connection. * Satellite hotspot will be used but standard WiFi is be preferred over satellite whenever possible. */ private Runnable runnable = new Runnable() { @Override public void run() { //checkCellularConnection(); if (handlerCheckEnabled) { Log.d(SWITCH_TAG, "handler checking cellular state"); Toast.makeText(cordova.getActivity().getApplicationContext(), "Handler Checking Cell State: "+mobileDataEnabled+" "+satDisabled, Toast.LENGTH_SHORT).show(); // First check if cellular is enabled in settings if (checkCellularEnabled()) { // Check if cellular data is suspended (loss of cellular network) wifiMan.setWifiEnabled(false); // Used to repeat handler check in order to allow time for configured networks to update if (handlerCellCheckEnabled) { handlerCellCheckEnabled = false; handler.postDelayed(this, 3000); // After WiFi updates check } handlerCellCheckEnabled = true; NetworkInfo handlerCellInfo = sockMan.getActiveNetworkInfo(); //Log.d("SEE THIS", "SEE THIS INFO: "+handlerCellInfo); if (handlerCellInfo.getType() == sockMan.TYPE_MOBILE && handlerCellInfo.getState() == NetworkInfo.State.CONNECTED) { mobileDataEnabled = true; //Log.d("SEE THIS", "SEE THIS HIT"); } wifiMan.setWifiEnabled(true); } else { mobileDataEnabled = false; } // Disable all satellite WiFi Access points and handler method if cellular is available if (mobileDataEnabled) { // Disable all "exp" SSIDs (disable all satellite terminals) if(!satDisabled) if (wifiMan.getConfiguredNetworks() != null) { for (WifiConfiguration preconfigDis2 : wifiMan.getConfiguredNetworks()) { if (preconfigDis2.SSID.contains(satSSID)) { Log.d(SWITCH_TAG, "3) disabling "+preconfigDis2.SSID); wifiMan.disableNetwork(preconfigDis2.networkId); } } satDisabled = true; } handlerCheckEnabled = false; //Log.d(SWITCH_TAG, "All Satellite WiFi AP disabled"); } else { // Otherwise enable all satellite terminals if(satDisabled) if (wifiMan.getConfiguredNetworks() != null) { for (WifiConfiguration preconfigEna : wifiMan.getConfiguredNetworks()) { //Log.d(SWITCH_TAG, "CHECKING if should enable SSID: "+preconfigEna.SSID); boolean attemptConnect = true; if 
(preconfigEna.SSID.contains(satSSID)) { Log.d(SWITCH_TAG, "enabling "+preconfigEna.SSID+" status: "+preconfigEna.status); wifiMan.enableNetwork(preconfigEna.networkId, true); if(attemptConnect) attemptConnect = false; } } satDisabled = false; } } } handler.postDelayed(this, handlerDelay); // Check again after every 50 seconds } }; public boolean checkCellularEnabled() { boolean isEnabled = true; try { Class cmClass = Class.forName(sockMan.getClass().getName()); Method method = cmClass.getDeclaredMethod("getMobileDataEnabled"); method.setAccessible(true); // Make the method callable // get the setting for "mobile data" isEnabled = (Boolean) method.invoke(sockMan); } catch (Exception e) { Log.d(SWITCH_TAG, "Cellular availability check failed"); } return isEnabled; } }
src/android/NetworkManager.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.cordova.networkinformation; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.LOG; import org.apache.cordova.PluginResult; import org.apache.cordova.CordovaWebView; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.net.ConnectivityManager; import android.net.Network; import android.net.NetworkCapabilities; import android.net.NetworkInfo; import android.net.NetworkRequest; import android.net.wifi.WifiConfiguration; import android.net.wifi.WifiManager; import android.os.Build; import android.os.Handler; import android.util.Log; import android.widget.Toast; import java.lang.reflect.Method; import java.util.Locale; public class NetworkManager extends CordovaPlugin { public static int NOT_REACHABLE = 0; public static int REACHABLE_VIA_CARRIER_DATA_NETWORK = 1; public static int REACHABLE_VIA_WIFI_NETWORK = 2; public static final String WIFI = "wifi"; public static final String WIMAX = "wimax"; // mobile public static final String MOBILE = "mobile"; // Android L calls this Cellular, because I have no idea! 
public static final String CELLULAR = "cellular"; // 2G network types public static final String TWO_G = "2g"; public static final String GSM = "gsm"; public static final String GPRS = "gprs"; public static final String EDGE = "edge"; // 3G network types public static final String THREE_G = "3g"; public static final String CDMA = "cdma"; public static final String UMTS = "umts"; public static final String HSPA = "hspa"; public static final String HSUPA = "hsupa"; public static final String HSDPA = "hsdpa"; public static final String ONEXRTT = "1xrtt"; public static final String EHRPD = "ehrpd"; // 4G network types public static final String FOUR_G = "4g"; public static final String LTE = "lte"; public static final String UMB = "umb"; public static final String HSPA_PLUS = "hspa+"; // return type public static final String TYPE_UNKNOWN = "unknown"; public static final String TYPE_ETHERNET = "ethernet"; public static final String TYPE_ETHERNET_SHORT = "eth"; public static final String TYPE_WIFI = "wifi"; public static final String TYPE_2G = "2g"; public static final String TYPE_3G = "3g"; public static final String TYPE_4G = "4g"; public static final String TYPE_NONE = "none"; private static final String LOG_TAG = "NetworkManager"; private CallbackContext connectionCallbackContext; ConnectivityManager sockMan; BroadcastReceiver receiver; private JSONObject lastInfo = null; WifiManager wifiMan; Handler handler; // Initiated when there is no cellular present private boolean mobileDataEnabled = true; // Assume enabled private boolean handlerCheckEnabled = false; // Indicates when to use handler private boolean prevWifiState; // State of WiFi private boolean handlerCellCheckEnabled = true; // Restart handler after disabling WiFi so cellular status is updated private boolean satDisabled = true; // Custom configuration settings private final String satSSID = "YodelMe"; // Satellite SSID private final int handlerDelay = 60000; // Run handler every 5 mins private static final String SWITCH_TAG = "WiFiPreference"; /** * Sets the context of the Command. This can then be used to do things like * get file paths associated with the Activity. * * @param cordova The context of the main Activity. * @param webView The CordovaWebView Cordova is running in. 
*/ public void initialize(CordovaInterface cordova, CordovaWebView webView) { super.initialize(cordova, webView); this.sockMan = (ConnectivityManager) cordova.getActivity().getSystemService(Context.CONNECTIVITY_SERVICE); this.connectionCallbackContext = null; this.wifiMan = (WifiManager) cordova.getActivity().getApplicationContext().getSystemService(Context.WIFI_SERVICE); // We need to listen to connectivity events to update navigator.connection IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); if (this.receiver == null) { this.receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { // (The null check is for the ARM Emulator, please use Intel Emulator for better results) if(NetworkManager.this.webView != null) updateConnectionInfo(sockMan.getActiveNetworkInfo()); } }; webView.getContext().registerReceiver(this.receiver, intentFilter); } // Initially disable all satellite Access Points for (WifiConfiguration preconfigDisInit: wifiMan.getConfiguredNetworks()) { if (preconfigDisInit.SSID.contains(satSSID)) { //Log.d(SWITCH_TAG, "1) disabling "+preconfigDisInit.SSID); wifiMan.disableNetwork(preconfigDisInit.networkId); } } satDisabled = true; // satellites are initally disabled; // Initialize timed handler which checks for cellular network availability handler = new Handler(); handler.postDelayed(runnable, handlerDelay); } /** * Executes the request and returns PluginResult. * * @param action The action to execute. * @param args JSONArry of arguments for the plugin. * @param callbackContext The callback id used when calling back into JavaScript. * @return True if the action was valid, false otherwise. */ public boolean execute(String action, JSONArray args, CallbackContext callbackContext) { if (action.equals("getConnectionInfo")) { this.connectionCallbackContext = callbackContext; NetworkInfo info = sockMan.getActiveNetworkInfo(); String connectionType = ""; try { connectionType = this.getConnectionInfo(info).get("type").toString(); } catch (JSONException e) { LOG.d(LOG_TAG, e.getLocalizedMessage()); } PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, connectionType); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); return true; } return false; } /** * Stop network receiver. 
*/ public void onDestroy() { //wifiMan.removeNetwork(satNetId); if (this.receiver != null) { try { webView.getContext().unregisterReceiver(this.receiver); } catch (Exception e) { LOG.e(LOG_TAG, "Error unregistering network receiver: " + e.getMessage(), e); } finally { receiver = null; } } } //-------------------------------------------------------------------------- // LOCAL METHODS //-------------------------------------------------------------------------- /** * Updates the JavaScript side whenever the connection changes * * @param info the current active network info * @return */ private void updateConnectionInfo(NetworkInfo info) { //Log.d("WifiPreference","SEE THIS: updateConnectionInfo just called"); Toast.makeText(cordova.getActivity().getApplicationContext(), "updateConnectionInfo() just called", Toast.LENGTH_SHORT).show(); // send update to javascript "navigator.network.connection" // Jellybean sends its own info JSONObject thisInfo = this.getConnectionInfo(info); if(!thisInfo.equals(lastInfo)) { String connectionType = ""; try { connectionType = thisInfo.get("type").toString(); } catch (JSONException e) { LOG.d(LOG_TAG, e.getLocalizedMessage()); } sendUpdate(connectionType); lastInfo = thisInfo; } //Log.d("WifiPreference", "HandlerCheckEnabled: "+handlerCheckEnabled); // Check current state of cellular connection if (!handlerCheckEnabled) { // First Check if cellular is enabled in settings if (checkCellularEnabled()) { // Check if cellular data is suspended (loss of cellular network) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { for (Network networkInfo : sockMan.getAllNetworks()) { NetworkInfo i = sockMan.getNetworkInfo(networkInfo); Log.d(SWITCH_TAG, "Network Info: " + i); if (i != null) { if (i.getType() == sockMan.TYPE_MOBILE && i.getState() == NetworkInfo.State.SUSPENDED) { Log.d(SWITCH_TAG, "This network is mobile and suspended"); mobileDataEnabled = false; } } } } } else { mobileDataEnabled = false; } //Toast.makeText(cordova.getActivity().getApplicationContext(), "updateConnectionInfo() mobileDataEnabled: "+mobileDataEnabled, Toast.LENGTH_SHORT).show(); //Log.d("WifiPreference", "updateConnectionInfo: MobileDataEnabled: "+mobileDataEnabled); // Run handler logic when standard WiFi not active or when mobile data not available if (!mobileDataEnabled) { // Enable handler or keep handler running handlerCheckEnabled = true; //Log.d("WifiPreference", "handlerCheck was just renabled"); } } } /** * Get the latest network connection information * * @param info the current active network info * @return a JSONObject that represents the network info */ private JSONObject getConnectionInfo(NetworkInfo info) { String type = TYPE_NONE; String extraInfo = ""; if (info != null) { // If we are not connected to any network set type to none if (!info.isConnected()) { type = TYPE_NONE; } else { type = getType(info); } extraInfo = info.getExtraInfo(); } LOG.d(LOG_TAG, "Connection Type: " + type); LOG.d(LOG_TAG, "Connection Extra Info: " + extraInfo); JSONObject connectionInfo = new JSONObject(); try { connectionInfo.put("type", type); connectionInfo.put("extraInfo", extraInfo); } catch (JSONException e) { LOG.d(LOG_TAG, e.getLocalizedMessage()); } return connectionInfo; } /** * Create a new plugin result and send it back to JavaScript * * @param type the network info to set as navigator.connection */ private void sendUpdate(String type) { if (connectionCallbackContext != null) { PluginResult result = new PluginResult(PluginResult.Status.OK, type); result.setKeepCallback(true); 
connectionCallbackContext.sendPluginResult(result); } webView.postMessage("networkconnection", type); } /** * Determine the type of connection * * @param info the network info so we can determine connection type. * @return the type of mobile network we are on */ private String getType(NetworkInfo info) { if (info != null) { String type = info.getTypeName().toLowerCase(Locale.US); LOG.d(LOG_TAG, "toLower : " + type.toLowerCase()); LOG.d(LOG_TAG, "wifi : " + WIFI); if (type.equals(WIFI)) { return TYPE_WIFI; } else if (type.toLowerCase().equals(TYPE_ETHERNET) || type.toLowerCase().startsWith(TYPE_ETHERNET_SHORT)) { return TYPE_ETHERNET; } else if (type.equals(MOBILE) || type.equals(CELLULAR)) { type = info.getSubtypeName().toLowerCase(Locale.US); if (type.equals(GSM) || type.equals(GPRS) || type.equals(EDGE) || type.equals(TWO_G)) { return TYPE_2G; } else if (type.startsWith(CDMA) || type.equals(UMTS) || type.equals(ONEXRTT) || type.equals(EHRPD) || type.equals(HSUPA) || type.equals(HSDPA) || type.equals(HSPA) || type.equals(THREE_G)) { return TYPE_3G; } else if (type.equals(LTE) || type.equals(UMB) || type.equals(HSPA_PLUS) || type.equals(FOUR_G)) { return TYPE_4G; } } } else { return TYPE_NONE; } return TYPE_UNKNOWN; } /** * Timed runnable method for periodic cellular availability checks. * When cellular data is absent, handler method will periodicely attempt to restablish cellular connection. * Satellite hotspot will be used but standard WiFi is be preferred over satellite whenever possible. */ private Runnable runnable = new Runnable() { @Override public void run() { //checkCellularConnection(); if (handlerCheckEnabled) { Log.d(SWITCH_TAG, "handler checking cellular state"); Toast.makeText(cordova.getActivity().getApplicationContext(), "Handler Checking Cell State: "+mobileDataEnabled+" "+satDisabled, Toast.LENGTH_SHORT).show(); // First check if cellular is enabled in settings if (checkCellularEnabled()) { // Check if cellular data is suspended (loss of cellular network) wifiMan.setWifiEnabled(false); // Used to repeat handler check in order to allow time for configured networks to update if (handlerCellCheckEnabled) { handlerCellCheckEnabled = false; handler.postDelayed(this, 3000); // After WiFi updates check } handlerCellCheckEnabled = true; NetworkInfo handlerCellInfo = sockMan.getActiveNetworkInfo(); //Log.d("SEE THIS", "SEE THIS INFO: "+handlerCellInfo); if (handlerCellInfo.getType() == sockMan.TYPE_MOBILE && handlerCellInfo.getState() == NetworkInfo.State.CONNECTED) { mobileDataEnabled = true; //Log.d("SEE THIS", "SEE THIS HIT"); } wifiMan.setWifiEnabled(true); } else { mobileDataEnabled = false; } // Disable all satellite WiFi Access points and handler method if cellular is available if (mobileDataEnabled) { // Disable all "exp" SSIDs (disable all satellite terminals) if(!satDisabled) if (wifiMan.getConfiguredNetworks() != null) { for (WifiConfiguration preconfigDis2 : wifiMan.getConfiguredNetworks()) { if (preconfigDis2.SSID.contains(satSSID)) { Log.d(SWITCH_TAG, "3) disabling "+preconfigDis2.SSID); wifiMan.disableNetwork(preconfigDis2.networkId); } } satDisabled = true; } handlerCheckEnabled = false; //Log.d(SWITCH_TAG, "All Satellite WiFi AP disabled"); } else { // Otherwise enable all satellite terminals if(satDisabled) if (wifiMan.getConfiguredNetworks() != null) { for (WifiConfiguration preconfigEna : wifiMan.getConfiguredNetworks()) { //Log.d(SWITCH_TAG, "CHECKING if should enable SSID: "+preconfigEna.SSID); boolean attemptConnect = true; if 
(preconfigEna.SSID.contains(satSSID)) { Log.d(SWITCH_TAG, "enabling "+preconfigEna.SSID+" status: "+preconfigEna.status); wifiMan.enableNetwork(preconfigEna.networkId, attemptConnect); if(attemptConnect) attemptConnect = false; } } satDisabled = false; } } } handler.postDelayed(this, handlerDelay); // Check again after every 50 seconds } }; public boolean checkCellularEnabled() { boolean isEnabled = true; try { Class cmClass = Class.forName(sockMan.getClass().getName()); Method method = cmClass.getDeclaredMethod("getMobileDataEnabled"); method.setAccessible(true); // Make the method callable // get the setting for "mobile data" isEnabled = (Boolean) method.invoke(sockMan); } catch (Exception e) { Log.d(SWITCH_TAG, "Cellular availability check failed"); } return isEnabled; } }
reset wifimanager enablenetwork method
src/android/NetworkManager.java
reset wifimanager enablenetwork method
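The only change between old_contents and new_contents above is the second argument to WifiManager.enableNetwork in the satellite re-enable loop; condensed below, with cfg standing in for the loop variable preconfigEna:
// old_contents: a flag variable is passed, but it is declared inside the loop
// body and re-initialized to true on every pass, so the call always saw true
boolean attemptConnect = true;
wifiMan.enableNetwork(cfg.networkId, attemptConnect);
if (attemptConnect) attemptConnect = false; // dead store: re-initialized next pass

// new_contents: the literal is passed directly, making the same behavior explicit
wifiMan.enableNetwork(cfg.networkId, true);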
Java
apache-2.0
8841ebe4f3d8ca55c139b4d7859f60df549a5517
0
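NetworkManager imports android.net.NetworkCapabilities and android.net.NetworkRequest without using them. A minimal sketch of the capabilities-based check those imports suggest, as a non-reflective alternative to checkCellularEnabled(); note it detects an active cellular network (API 21+) rather than the mobile-data setting itself, and it is not part of the plugin above:
import android.net.ConnectivityManager;
import android.net.Network;
import android.net.NetworkCapabilities;

final class CellularTransportSketch {
    // Returns true when any network tracked by the ConnectivityManager
    // currently uses the cellular transport.
    static boolean hasCellularTransport(ConnectivityManager cm) {
        for (Network network : cm.getAllNetworks()) {
            NetworkCapabilities caps = cm.getNetworkCapabilities(network);
            if (caps != null && caps.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR)) {
                return true;
            }
        }
        return false;
    }
}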
marques-work/gocd,Skarlso/gocd,ketan/gocd,bdpiparva/gocd,ibnc/gocd,kierarad/gocd,gocd/gocd,arvindsv/gocd,GaneshSPatil/gocd
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.merge; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.remote.*; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.PiplineConfigVisitor; import org.apache.commons.lang3.StringUtils; import java.util.*; import static com.thoughtworks.go.util.ExceptionUtils.bomb; /** * @understands pipeline group configuration in many parts. * <p> * Composite of many pipeline configuration parts. */ @ConfigTag("pipelines") public class MergePipelineConfigs implements PipelineConfigs { @ConfigSubtag private PipelineConfigsPartials parts = new PipelineConfigsPartials(); private final ConfigErrors configErrors = new ConfigErrors(); public MergePipelineConfigs(PipelineConfigs... parts) { this.parts.addAll(Arrays.asList(parts)); validateGroupNameUniqueness(this.parts); } public MergePipelineConfigs(List<PipelineConfigs> parts) { this.parts.addAll(parts); validateGroupNameUniqueness(this.parts); } public void addPart(BasicPipelineConfigs pipelineConfigs) { if (!StringUtils.equals(pipelineConfigs.getGroup(), this.getGroup())) throw new IllegalArgumentException("Group names must be the same in merge"); this.parts.add(pipelineConfigs); } private void validateGroupNameUniqueness(List<PipelineConfigs> parts) { String name = parts.get(0).getGroup(); for (PipelineConfigs part : parts) { String otherName = part.getGroup(); if (!StringUtils.equals(otherName, name)) throw new IllegalArgumentException("Group names must be the same in merge"); } } public PipelineConfigs getAuthorizationPart() { PipelineConfigs found = this.getAuthorizationPartOrNull(); if (found == null) throw bomb("No valid configuration part to store authorization"); return found; } public PipelineConfigs getAuthorizationPartOrNull() { for (PipelineConfigs part : parts) { if (part.getOrigin() != null && part.getOrigin().isLocal()) return part; } return null; } public PipelineConfigs getPartWithPipeline(CaseInsensitiveString pipelineName) { for (PipelineConfigs part : parts) { if (part.hasPipeline(pipelineName)) return part; } return null; } public PipelineConfigs getFirstEditablePartOrNull() { for (PipelineConfigs part : parts) { if (isEditable(part)) return part; } return null; } public PipelineConfigs getFirstEditablePart() { PipelineConfigs found = getFirstEditablePartOrNull(); if (found == null) throw bomb("No editable configuration part"); return found; } @Override public void validate(ValidationContext validationContext) { this.validateGroupNameAndAddErrorsTo(this.configErrors); for (PipelineConfigs part : this.parts) { part.validate(validationContext); } verifyPipelineNameUniqueness(); } private void verifyPipelineNameUniqueness() { HashMap<CaseInsensitiveString, PipelineConfig> hashMap = new HashMap<>(); for (PipelineConfig pipelineConfig : this) { pipelineConfig.validateNameUniqueness(hashMap); } } @Override public void validateNameUniqueness(Map<String, PipelineConfigs> groupNameMap) { 
String currentName = sanitizedGroupName(this.getGroup()).toLowerCase(); PipelineConfigs groupWithSameName = groupNameMap.get(currentName); if (groupWithSameName == null) { groupNameMap.put(currentName, this); } else { groupWithSameName.addError(GROUP, createNameConflictError()); this.nameConflictError(); } } private void nameConflictError() { this.configErrors.add(GROUP, createNameConflictError()); } private String createNameConflictError() { return String.format("Group with name '%s' already exists", this.getGroup()); } public static String sanitizedGroupName(String group) { return StringUtils.isBlank(group) ? DEFAULT_GROUP : group; } @Override public ConfigOrigin getOrigin() { MergeConfigOrigin origins = new MergeConfigOrigin(); for (PipelineConfigs part : this.parts) { origins.add(part.getOrigin()); } return origins; } @Override public void setOrigins(ConfigOrigin origins) { throw bomb("Cannot set origins on merged config"); } @Override public PipelineConfig findBy(CaseInsensitiveString pipelineName) { for (PipelineConfigs part : this.parts) { PipelineConfig found = part.findBy(pipelineName); if (found != null) return found; } return null; } @Override public int size() { int count = 0; for (PipelineConfigs part : this.parts) { count += part.size(); } return count; } @Override public boolean isEmpty() { return size() == 0; } @Override public boolean hasRemoteParts() { return getOrigin() != null && !getOrigin().isLocal(); } @Override public boolean contains(PipelineConfig o) { for (PipelineConfigs part : this.parts) { if (part.contains(o)) return true; } return false; } @Override public void remove(PipelineConfig pipelineConfig) { PipelineConfigs part = this.getPartWithPipeline(pipelineConfig.name()); if (!isEditable(part)) throw bomb("Cannot remove pipeline from non-editable configuration source"); part.remove(pipelineConfig); } @Override public PipelineConfig remove(int i) { if (i < 0) throw new IndexOutOfBoundsException(); int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (i < end) return part.remove(i - start); start = end; } throw new IndexOutOfBoundsException(); } @Override public void validateGroupNameAndAddErrorsTo(ConfigErrors errors) { this.parts.get(0).validateGroupNameAndAddErrorsTo(errors); } public PipelineConfigs getLocal() { for (PipelineConfigs part : this.parts) { if (part.isLocal()) return part; } return null; } @Override public boolean isLocal() { return getOrigin() == null || getOrigin().isLocal(); } @Override public boolean add(PipelineConfig pipelineConfig) { verifyUniqueName(pipelineConfig); PipelineConfigs part = this.getFirstEditablePartOrNull(); if (part == null) throw bomb("No editable configuration sources"); return part.add(pipelineConfig); } private void verifyUniqueName(PipelineConfig pipelineConfig) { if (alreadyContains(pipelineConfig)) { throw bomb("You have defined multiple pipelines called '" + pipelineConfig.name() + "'. 
Pipeline names must be unique."); } } private boolean alreadyContains(PipelineConfig pipelineConfig) { for (PipelineConfigs part : this.parts) { if (part.hasPipeline(pipelineConfig.name())) return true; } return false; } public PipelineConfigs getPartWithIndex(int i) { if (i < 0) throw new IndexOutOfBoundsException(); int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (i < end) return part; start = end; } throw new IndexOutOfBoundsException(); } public PipelineConfigs getPartWithIndexForInsert(int i) { if (i < 0) throw new IndexOutOfBoundsException(); int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (i < end) return part; start = end; } return this.parts.get(this.parts.size() - 1); } @Override public PipelineConfig get(int i) { if (i < 0) throw new IndexOutOfBoundsException(); int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (i < end) return part.get(i - start); start = end; } throw new IndexOutOfBoundsException(); } @Override public boolean addWithoutValidation(PipelineConfig pipelineConfig) { PipelineConfigs part = this.getFirstEditablePartOrNull(); if (part == null) throw bomb("No editable configuration sources"); return part.addWithoutValidation(pipelineConfig); } @Override public PipelineConfig set(int i, PipelineConfig pipelineConfig) { if (i < 0) throw new IndexOutOfBoundsException(); int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (i < end) { if (isEditable(part)) { return part.set(i - start, pipelineConfig); } else { throw bomb(String.format("Cannot edit pipeline %s", pipelineConfig.name())); } } start = end; } throw new IndexOutOfBoundsException(); } @Override public void addToTop(PipelineConfig pipelineConfig) { PipelineConfigs part = this.getFirstEditablePart(); part.addToTop(pipelineConfig); } @Override public void add(int index, PipelineConfig pipelineConfig) { PipelineConfigs part = getPartWithIndexForInsert(index); if (!isEditable(part)) throw bomb("Cannot add pipeline to non-editable configuration part"); int start = getFirstIndexInPart(part); part.add(index - start, pipelineConfig); } private int getFirstIndexInPart(PipelineConfigs p) { int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (part.equals(p)) return start; start = end; } return -1; } @Override public int indexOf(PipelineConfig o) { int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); int internalIndex = part.indexOf(o); if (internalIndex > 0) return start + internalIndex; start = end; } return -1; } @Override public Iterator<PipelineConfig> iterator() { return new Iterator<PipelineConfig>() { private int currentIndex = 0; private int count = size(); @Override public boolean hasNext() { return currentIndex < count; } @Override public PipelineConfig next() { return get(currentIndex++); } @Override public void remove() { throw new RuntimeException("Not implemented"); } }; } @Override public String getGroup() { return this.parts.get(0).getGroup(); } @Override public void setGroup(String group) { if (group.equals(this.getGroup())) { return; } for (PipelineConfigs part : this.parts) { if (!isEditable(part)) { throw bomb("Cannot update group name because there are non-editable parts"); } } for (PipelineConfigs part : this.parts) { part.setGroup(group); } } private boolean isEditable(PipelineConfigs part) { return part.getOrigin() != null && part.getOrigin().canEdit(); } @Override 
public boolean isNamed(String groupName) { return this.isSameGroup(groupName); } public void update(String groupName, PipelineConfig pipeline, String pipelineName) { if (!isSameGroup(groupName)) { return; } this.set(getIndex(pipelineName), pipeline); } private boolean isSameGroup(String groupName) { return StringUtils.equals(groupName, this.getGroup()); } private int getIndex(String pipelineName) { CaseInsensitiveString caseName = new CaseInsensitiveString(pipelineName); int start = 0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if (part.hasPipeline(caseName)) { int internalIndex = part.indexOf(part.findBy(caseName)); return start + internalIndex; } start = end; } return -1; } @Override public boolean save(PipelineConfig pipeline, String groupName) { if (isSameGroup(groupName)) { this.addToTop(pipeline); return true; } else { return false; } } @Override public void add(List<String> allGroup) { allGroup.add(this.getGroup()); } @Override public boolean exist(int pipelineIndex) { throw new RuntimeException("Not implemented"); } @Override public boolean hasPipeline(CaseInsensitiveString pipelineName) { for (PipelineConfigs part : this.parts) { if (part.hasPipeline(pipelineName)) return true; } return false; } @Override public void accept(PiplineConfigVisitor visitor) { for (PipelineConfig pipelineConfig : this) { visitor.visit(pipelineConfig); } } @Override public boolean hasTemplate() { for (PipelineConfigs part : this.parts) { if (part.hasTemplate()) return true; } return false; } @Override public PipelineConfigs getCopyForEditing() { List<PipelineConfigs> parts = new ArrayList<>(); for (PipelineConfigs part : this.parts) { parts.add(part.getCopyForEditing()); } return new MergePipelineConfigs(parts); } @Override public boolean isUserAnAdmin(CaseInsensitiveString userName, List<Role> memberRoles) { return this.getAuthorizationPart().isUserAnAdmin(userName, memberRoles); } @Override public ConfigErrors errors() { return configErrors; } @Override public List<PipelineConfig> getPipelines() { List<PipelineConfig> list = new ArrayList<>(); for (PipelineConfig pipe : this) { list.add(pipe); } return list; } @Override public void addError(String fieldName, String message) { configErrors.add(fieldName, message); } @Override public void setConfigAttributes(Object attributes) { Map attributeMap = (Map) attributes; if (attributeMap == null) { return; } if (attributeMap.containsKey(GROUP)) { String group = (String) attributeMap.get(GROUP); this.setGroup(group); } if (attributeMap.containsKey(AUTHORIZATION) || attributeMap.isEmpty()) { PipelineConfigs authorizationPart = this.getAuthorizationPart(); authorizationPart.setConfigAttributes(attributes); } } @Override public List<AdminUser> getOperateUsers() { return this.getAuthorizationPart().getOperateUsers(); } @Override public List<AdminRole> getOperateRoles() { return this.getAuthorizationPart().getOperateRoles(); } @Override public List<String> getOperateRoleNames() { return this.getAuthorizationPart().getOperateRoleNames(); } @Override public List<String> getOperateUserNames() { return this.getAuthorizationPart().getOperateUserNames(); } @Override public void cleanupAllUsagesOfRole(Role roleToDelete) { this.getAuthorizationPart().cleanupAllUsagesOfRole(roleToDelete); } @Override public boolean hasAuthorizationDefined() { PipelineConfigs authPart = this.getAuthorizationPartOrNull(); if (authPart == null) return false; return authPart.hasAuthorizationDefined(); } @Override public Authorization getAuthorization() { return 
this.getAuthorizationPart().getAuthorization(); } @Override public void setAuthorization(Authorization authorization) { this.getAuthorizationPart().setAuthorization(authorization); } @Override public boolean hasViewPermission(CaseInsensitiveString username, UserRoleMatcher userRoleMatcher) { return this.getAuthorizationPart().hasViewPermission(username, userRoleMatcher); } @Override public boolean hasViewPermissionDefined() { PipelineConfigs authPart = this.getAuthorizationPartOrNull(); if (authPart == null) return false; return authPart.hasViewPermissionDefined(); } @Override public boolean hasOperationPermissionDefined() { PipelineConfigs authPart = this.getAuthorizationPartOrNull(); if (authPart == null) return false; return authPart.hasOperationPermissionDefined(); } @Override public boolean hasOperatePermission(CaseInsensitiveString username, UserRoleMatcher userRoleMatcher) { return this.getAuthorizationPart().hasOperatePermission(username, userRoleMatcher); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; MergePipelineConfigs that = (MergePipelineConfigs) o; return Objects.equals(parts, that.parts); } @Override public int hashCode() { return Objects.hash(parts); } }
config/config-api/src/main/java/com/thoughtworks/go/config/merge/MergePipelineConfigs.java
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.merge; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.remote.*; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.PiplineConfigVisitor; import org.apache.commons.lang3.StringUtils; import java.util.*; import static com.thoughtworks.go.util.ExceptionUtils.bomb; /** * @understands pipeline group configuration in many parts. * * Composite of many pipeline configuration parts. */ @ConfigTag("pipelines") public class MergePipelineConfigs implements PipelineConfigs { @ConfigSubtag private PipelineConfigsPartials parts = new PipelineConfigsPartials(); private final ConfigErrors configErrors = new ConfigErrors(); public MergePipelineConfigs(PipelineConfigs... parts) { this.parts.addAll(Arrays.asList(parts)); validateGroupNameUniqueness(this.parts); } public MergePipelineConfigs(List<PipelineConfigs> parts) { this.parts.addAll(parts); validateGroupNameUniqueness(this.parts); } public void addPart(BasicPipelineConfigs pipelineConfigs) { if (!StringUtils.equals(pipelineConfigs.getGroup(), this.getGroup())) throw new IllegalArgumentException("Group names must be the same in merge"); this.parts.add(pipelineConfigs); } private void validateGroupNameUniqueness(List<PipelineConfigs> parts) { String name = parts.get(0).getGroup(); for (PipelineConfigs part : parts) { String otherName = part.getGroup(); if (!StringUtils.equals(otherName, name)) throw new IllegalArgumentException("Group names must be the same in merge"); } } public PipelineConfigs getAuthorizationPart() { PipelineConfigs found = this.getAuthorizationPartOrNull(); if(found == null) throw bomb("No valid configuration part to store authorization"); return found; } public PipelineConfigs getAuthorizationPartOrNull() { for(PipelineConfigs part : parts) { if(part.getOrigin() != null && part.getOrigin().isLocal()) return part; } return null; } public PipelineConfigs getPartWithPipeline(CaseInsensitiveString pipelineName) { for(PipelineConfigs part : parts) { if(part.hasPipeline(pipelineName)) return part; } return null; } public PipelineConfigs getFirstEditablePartOrNull() { for(PipelineConfigs part : parts) { if(isEditable(part)) return part; } return null; } public PipelineConfigs getFirstEditablePart() { PipelineConfigs found = getFirstEditablePartOrNull(); if(found == null) throw bomb("No editable configuration part"); return found; } @Override public void validate(ValidationContext validationContext) { this.validateGroupNameAndAddErrorsTo(this.configErrors); for(PipelineConfigs part : this.parts) { part.validate(validationContext); } verifyPipelineNameUniqueness(); } private void verifyPipelineNameUniqueness() { HashMap<CaseInsensitiveString, PipelineConfig> hashMap = new HashMap<>(); for(PipelineConfig pipelineConfig : this){ pipelineConfig.validateNameUniqueness(hashMap); } } @Override public void validateNameUniqueness(Map<String, PipelineConfigs> groupNameMap) { String 
currentName = sanitizedGroupName(this.getGroup()).toLowerCase(); PipelineConfigs groupWithSameName = groupNameMap.get(currentName); if (groupWithSameName == null) { groupNameMap.put(currentName, this); } else { groupWithSameName.addError(GROUP, createNameConflictError()); this.nameConflictError(); } } private void nameConflictError() { this.configErrors.add(GROUP, createNameConflictError()); } private String createNameConflictError() { return String.format("Group with name '%s' already exists", this.getGroup()); } public static String sanitizedGroupName(String group) { return StringUtils.isBlank(group) ? DEFAULT_GROUP : group; } @Override public ConfigOrigin getOrigin() { MergeConfigOrigin origins = new MergeConfigOrigin(); for(PipelineConfigs part : this.parts) { origins.add(part.getOrigin()); } return origins; } @Override public void setOrigins(ConfigOrigin origins) { throw bomb("Cannot set origins on merged config"); } @Override public PipelineConfig findBy(CaseInsensitiveString pipelineName) { for (PipelineConfigs part : this.parts) { PipelineConfig found = part.findBy(pipelineName); if(found != null) return found; } return null; } @Override public int size() { int count = 0; for (PipelineConfigs part : this.parts) { count += part.size(); } return count; } @Override public boolean isEmpty() { return size() == 0; } @Override public boolean hasRemoteParts() { return getOrigin() != null && !getOrigin().isLocal(); } @Override public boolean contains(PipelineConfig o) { for (PipelineConfigs part : this.parts) { if(part.contains(o)) return true; } return false; } @Override public void remove(PipelineConfig pipelineConfig) { PipelineConfigs part = this.getPartWithPipeline(pipelineConfig.name()); if(!isEditable(part)) throw bomb("Cannot remove pipeline from non-editable configuration source"); part.remove(pipelineConfig); } @Override public PipelineConfig remove(int i) { if(i < 0) throw new IndexOutOfBoundsException(); int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(i < end) return part.remove(i - start); start = end; } throw new IndexOutOfBoundsException(); } @Override public void validateGroupNameAndAddErrorsTo(ConfigErrors errors) { this.parts.get(0).validateGroupNameAndAddErrorsTo(errors); } public PipelineConfigs getLocal() { for (PipelineConfigs part : this.parts) { if(part.isLocal()) return part; } return null; } @Override public boolean isLocal() { return getOrigin() == null || getOrigin().isLocal(); } @Override public boolean add(PipelineConfig pipelineConfig) { verifyUniqueName(pipelineConfig); PipelineConfigs part = this.getFirstEditablePartOrNull(); if(part == null) throw bomb("No editable configuration sources"); return part.add(pipelineConfig); } private void verifyUniqueName(PipelineConfig pipelineConfig) { if (alreadyContains(pipelineConfig)) { throw bomb("You have defined multiple pipelines called '" + pipelineConfig.name() + "'. 
Pipeline names must be unique."); } } private boolean alreadyContains(PipelineConfig pipelineConfig) { for (PipelineConfigs part : this.parts) { if(part.hasPipeline(pipelineConfig.name())) return true; } return false; } public PipelineConfigs getPartWithIndex(int i) { if(i < 0) throw new IndexOutOfBoundsException(); int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(i < end) return part; start = end; } throw new IndexOutOfBoundsException(); } public PipelineConfigs getPartWithIndexForInsert(int i) { if(i < 0) throw new IndexOutOfBoundsException(); int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(i < end) return part; start = end; } return this.parts.get(this.parts.size() -1); } @Override public PipelineConfig get(int i) { if(i < 0) throw new IndexOutOfBoundsException(); int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(i < end) return part.get(i - start); start = end; } throw new IndexOutOfBoundsException(); } @Override public boolean addWithoutValidation(PipelineConfig pipelineConfig) { PipelineConfigs part = this.getFirstEditablePartOrNull(); if(part == null) throw bomb("No editable configuration sources"); return part.addWithoutValidation(pipelineConfig); } @Override public PipelineConfig set(int i, PipelineConfig pipelineConfig) { if(i < 0) throw new IndexOutOfBoundsException(); int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(i < end) { if(isEditable(part)) { return part.set(i - start, pipelineConfig); } else { throw bomb(String.format("Cannot edit pipeline %s", pipelineConfig.name())); } } start = end; } throw new IndexOutOfBoundsException(); } @Override public void addToTop(PipelineConfig pipelineConfig) { PipelineConfigs part = this.getFirstEditablePart(); part.addToTop(pipelineConfig); } @Override public void add(int index, PipelineConfig pipelineConfig) { PipelineConfigs part = getPartWithIndexForInsert(index); if(!isEditable(part)) throw bomb("Cannot add pipeline to non-editable configuration part"); int start = getFirstIndexInPart(part); part.add(index - start, pipelineConfig); } private int getFirstIndexInPart(PipelineConfigs p) { int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(part.equals(p)) return start; start = end; } return -1; } @Override public int indexOf(PipelineConfig o) { int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); int internalIndex = part.indexOf(o); if(internalIndex > 0) return start + internalIndex; start = end; } return -1; } @Override public Iterator<PipelineConfig> iterator() { return new Iterator<PipelineConfig>() { private int currentIndex = 0; private int count = size(); @Override public boolean hasNext() { return currentIndex < count; } @Override public PipelineConfig next() { return get(currentIndex++); } @Override public void remove() { throw new RuntimeException("Not implemented"); } }; } @Override public String getGroup() { return this.parts.get(0).getGroup(); } @Override public void setGroup(String group) { if(group.equals(this.getGroup())) { return; } for(PipelineConfigs part : this.parts) { if(!isEditable(part)) { throw bomb("Cannot update group name because there are non-editable parts"); } } for(PipelineConfigs part : this.parts) { part.setGroup(group); } } private boolean isEditable(PipelineConfigs part) { return part.getOrigin() != null && part.getOrigin().canEdit(); } @Override public boolean 
isNamed(String groupName) { return this.isSameGroup(groupName); } public void update(String groupName, PipelineConfig pipeline, String pipelineName) { if (!isSameGroup(groupName)) { return; } this.set(getIndex(pipelineName), pipeline); } private boolean isSameGroup(String groupName) { return StringUtils.equals(groupName, this.getGroup()); } private int getIndex(String pipelineName) { CaseInsensitiveString caseName = new CaseInsensitiveString(pipelineName); int start =0; for (PipelineConfigs part : this.parts) { int end = start + part.size(); if(part.hasPipeline(caseName)) { int internalIndex = part.indexOf(part.findBy(caseName)); return start + internalIndex; } start = end; } return -1; } @Override public boolean save(PipelineConfig pipeline, String groupName) { if (isSameGroup(groupName)) { this.addToTop(pipeline); return true; } else { return false; } } @Override public void add(List<String> allGroup) { allGroup.add(this.getGroup()); } @Override public boolean exist(int pipelineIndex) { throw new RuntimeException("Not implemented"); } @Override public boolean hasPipeline(CaseInsensitiveString pipelineName) { for (PipelineConfigs part : this.parts) { if(part.hasPipeline(pipelineName)) return true; } return false; } @Override public void accept(PiplineConfigVisitor visitor) { for (PipelineConfig pipelineConfig : this) { visitor.visit(pipelineConfig); } } @Override public boolean hasTemplate() { for(PipelineConfigs part : this.parts) { if(part.hasTemplate()) return true; } return false; } @Override public PipelineConfigs getCopyForEditing() { List<PipelineConfigs> parts = new ArrayList<>(); for(PipelineConfigs part : this.parts) { parts.add(part.getCopyForEditing()); } return new MergePipelineConfigs(parts); } @Override public boolean isUserAnAdmin(CaseInsensitiveString userName, List<Role> memberRoles) { return this.getAuthorizationPart().isUserAnAdmin(userName,memberRoles); } @Override public ConfigErrors errors() { return configErrors; } @Override public List<PipelineConfig> getPipelines() { List<PipelineConfig> list = new ArrayList<>(); for(PipelineConfig pipe : this) { list.add(pipe); } return list; } @Override public void addError(String fieldName, String message) { configErrors.add(fieldName, message); } @Override public void setConfigAttributes(Object attributes) { Map attributeMap = (Map) attributes; if (attributeMap == null) { return; } if (attributeMap.containsKey(GROUP)) { String group = (String) attributeMap.get(GROUP); this.setGroup(group); } if (attributeMap.containsKey(AUTHORIZATION) || attributeMap.isEmpty()) { PipelineConfigs authorizationPart = this.getAuthorizationPart(); authorizationPart.setConfigAttributes(attributes); } } @Override public List<AdminUser> getOperateUsers() { return this.getAuthorizationPart().getOperateUsers(); } @Override public List<AdminRole> getOperateRoles() { return this.getAuthorizationPart().getOperateRoles(); } @Override public List<String> getOperateRoleNames() { return this.getAuthorizationPart().getOperateRoleNames(); } @Override public List<String> getOperateUserNames() { return this.getAuthorizationPart().getOperateUserNames(); } @Override public void cleanupAllUsagesOfRole(Role roleToDelete) { this.getAuthorizationPart().cleanupAllUsagesOfRole(roleToDelete); } @Override public boolean hasAuthorizationDefined() { PipelineConfigs authPart = this.getAuthorizationPartOrNull(); if(authPart == null) return false; return authPart.hasAuthorizationDefined(); } @Override public Authorization getAuthorization() { return 
this.getAuthorizationPart().getAuthorization(); } @Override public void setAuthorization(Authorization authorization) { this.getAuthorizationPart().setAuthorization(authorization); } @Override public boolean hasViewPermission(CaseInsensitiveString username, UserRoleMatcher userRoleMatcher) { return this.getAuthorizationPart().hasViewPermission(username, userRoleMatcher); } @Override public boolean hasViewPermissionDefined() { PipelineConfigs authPart = this.getAuthorizationPartOrNull(); if (authPart == null) return false; return authPart.hasViewPermissionDefined(); } @Override public boolean hasOperationPermissionDefined() { PipelineConfigs authPart = this.getAuthorizationPartOrNull(); if (authPart == null) return false; return authPart.hasOperationPermissionDefined(); } @Override public boolean hasOperatePermission(CaseInsensitiveString username, UserRoleMatcher userRoleMatcher) { return this.getAuthorizationPart().hasOperatePermission(username, userRoleMatcher); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; MergePipelineConfigs that = (MergePipelineConfigs) o; return Objects.equals(parts, that.parts); } @Override public int hashCode() { return Objects.hash(parts); } }
IDEA reformatting
config/config-api/src/main/java/com/thoughtworks/go/config/merge/MergePipelineConfigs.java
IDEA reformatting
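The core technique in the MergePipelineConfigs record above is composite indexing: get(int), remove(int) and set(int, ...) walk the parts while carrying a running offset, translating a group-wide index into a part-local one. (Worth noting: indexOf in the class tests internalIndex > 0 rather than >= 0, so a pipeline sitting at local index 0 of any part is never found.) Below is a minimal standalone sketch of that arithmetic; the class name and the use of plain string lists are illustrative only, not the GoCD API.

import java.util.ArrayList;
import java.util.List;

class CompositeIndexDemo {
    // Translate a composite-wide index into a part-local lookup,
    // mirroring MergePipelineConfigs.get(int) above.
    static String get(List<List<String>> parts, int i) {
        if (i < 0) throw new IndexOutOfBoundsException();
        int start = 0;
        for (List<String> part : parts) {
            int end = start + part.size();           // exclusive upper bound of this part
            if (i < end) return part.get(i - start); // part-local index
            start = end;
        }
        throw new IndexOutOfBoundsException();
    }

    // Reverse direction: find the composite-wide index of an element.
    // Uses >= 0 so a hit at local index 0 of a part is not skipped.
    static int indexOf(List<List<String>> parts, String o) {
        int start = 0;
        for (List<String> part : parts) {
            int internal = part.indexOf(o);
            if (internal >= 0) return start + internal;
            start += part.size();
        }
        return -1;
    }

    public static void main(String[] args) {
        List<List<String>> parts = new ArrayList<>();
        parts.add(List.of("a", "b"));            // composite indices 0..1
        parts.add(List.of("c"));                 // composite index 2
        System.out.println(get(parts, 2));       // -> c
        System.out.println(indexOf(parts, "c")); // -> 2
    }
}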
Java
apache-2.0
de5106af6bb77f802d31d40b403851b61aa01d83
0
skptl/speech-android-sdk,watson-developer-cloud/speech-android-sdk,jithsjoy/speech-android-sdk
package com.ibm.cio.opus; import android.os.Environment; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.util.Random; import org.xiph.speex.AudioFileWriter; import org.xiph.speex.OggCrc; import com.ibm.cio.audio.ChuckWebSocketUploader; import com.ibm.cio.audio.SpeechConfiguration; import com.ibm.cio.util.Logger; public class OpusWriter extends AudioFileWriter { private String TAG = this.getClass().getSimpleName(); private ChuckWebSocketUploader client; /** Number of packets in an Ogg page (must be less than 255) */ public static final int PACKETS_PER_OGG_PAGE = 50; /** Defines the sampling rate of the audio input. */ protected int sampleRate; /** Ogg Stream Serial Number */ protected int streamSerialNumber; /** Data buffer */ private byte[] dataBuffer; /** Pointer within the Data buffer */ private int dataBufferPtr; /** Header buffer */ private byte[] headerBuffer; /** Pointer within the Header buffer */ private int headerBufferPtr; /** Ogg Page count */ protected int pageCount; /** Opus packet count within an Ogg Page */ private int packetCount; /** * Absolute granule position * (the number of audio samples from beginning of file to end of Ogg Packet). */ private long granulepos; /** Frame size */ private int frameSize; public OpusWriter(){} /** * Setting up the OggOpus Writer * @param client */ public OpusWriter(ChuckWebSocketUploader client){ this.client = client; if (streamSerialNumber == 0) streamSerialNumber = new Random().nextInt(); dataBuffer = new byte[65565]; dataBufferPtr = 0; headerBuffer = new byte[255]; headerBufferPtr = 0; pageCount = 0; packetCount = 0; granulepos = 0; sampleRate = 16000; this.frameSize = SpeechConfiguration.FRAME_SIZE; } @Override public void close() throws IOException { Logger.d(TAG, "Opus Writer Closing..."); flush(true); // this.closeFile(); this.client.stop(); } @Override public void open(File file) throws IOException {} @Override public void open(String filename) throws IOException {} @Override public void writeHeader(String comment) { // this.createFile(); Logger.d(TAG, "Opus Writer Headering..."); byte[] header; byte[] data; int chksum; /* writes the OGG header page */ header = buildOggPageHeader(2, 0, streamSerialNumber, pageCount++, 1, new byte[] {19}); data = buildOpusHeader(sampleRate); chksum = OggCrc.checksum(0, header, 0, header.length); chksum = OggCrc.checksum(chksum, data, 0, data.length); writeInt(header, 22, chksum); this.write(header); this.write(data); /* Writes the OGG comment page */ header = buildOggPageHeader(0, 0, streamSerialNumber, pageCount++, 1, new byte[]{(byte) (comment.length() + 8)}); data = buildOpusComment(comment); chksum = OggCrc.checksum(0, header, 0, header.length); chksum = OggCrc.checksum(chksum, data, 0, data.length); writeInt(header, 22, chksum); this.write(header); this.write(data); } @Override public void writePacket(byte[] data, int offset, int len) throws IOException { // if nothing to write if (len <= 0) { return; } // System.out.println("PACKETS_PER_OGG_PAGE=" + PACKETS_PER_OGG_PAGE + ", packetCount=" + packetCount + ", granulepos=" + granulepos); if (packetCount > PACKETS_PER_OGG_PAGE) { flush(false); } System.arraycopy(data, offset, dataBuffer, dataBufferPtr, len); dataBufferPtr += len; headerBuffer[headerBufferPtr++]=(byte)len; packetCount++; granulepos += this.frameSize*2; } /** * Flush the Ogg page out of the buffers into the file. 
* @param eos - end of stream * @exception IOException */ protected void flush(final boolean eos) throws IOException{ int chksum; byte[] header; /* Writes the OGG header page */ header = buildOggPageHeader((eos ? 4 : 0), granulepos, streamSerialNumber, pageCount++, packetCount, headerBuffer); chksum = OggCrc.checksum(0, header, 0, header.length); chksum = OggCrc.checksum(chksum, dataBuffer, 0, dataBufferPtr); writeInt(header, 22, chksum); this.write(header); this.write(dataBuffer, 0, dataBufferPtr); dataBufferPtr = 0; headerBufferPtr = 0; packetCount = 0; } public void write(byte[] data){ // Logger.d(TAG, "Opus Writer Writing...[" + data.length + "]"); // this.writeFile(data); this.client.upload(data); } public void write(byte[] data, int offset, int count){ // Logger.d(TAG, "Opus Writer Writing...["+data.length+", "+offset+", "+count+"]"); byte[] tmp = new byte[count]; System.arraycopy(data, offset, tmp, 0, count); // this.writeFile(tmp); this.client.upload(tmp); } // ################# FOR TESTING PURPOSE ################# // File myFile = null; FileOutputStream fos = null; public String getBaseDir() { String baseDir = ""; if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { baseDir = Environment.getExternalStorageDirectory().getAbsolutePath() + "/"; } return baseDir; } private void createFile(){ myFile = new File(this.getBaseDir()+"WatsonR.opus"); try { myFile.deleteOnExit(); myFile.createNewFile(); fos = new FileOutputStream(myFile); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } private void writeFile(byte[] data){ try { fos.write(data); } catch (IOException e) { e.printStackTrace(); } } private void closeFile(){ try { fos.flush(); fos.close(); Logger.w(TAG, "Encoded file size=" + myFile.length()); } catch (IOException e) { e.printStackTrace(); } } }
speech-android-wrapper/src/main/java/com/ibm/cio/opus/OpusWriter.java
package com.ibm.cio.opus; import android.os.Environment; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.util.Random; import org.xiph.speex.AudioFileWriter; import org.xiph.speex.OggCrc; import com.ibm.cio.audio.ChuckWebSocketUploader; import com.ibm.cio.audio.SpeechConfiguration; import com.ibm.cio.util.Logger; public class OpusWriter extends AudioFileWriter { private String TAG = this.getClass().getSimpleName(); private ChuckWebSocketUploader client; /** Number of packets in an Ogg page (must be less than 255) */ public static final int PACKETS_PER_OGG_PAGE = 50; /** Defines the sampling rate of the audio input. */ protected int sampleRate; /** Ogg Stream Serial Number */ protected int streamSerialNumber; /** Data buffer */ private byte[] dataBuffer; /** Pointer within the Data buffer */ private int dataBufferPtr; /** Header buffer */ private byte[] headerBuffer; /** Pointer within the Header buffer */ private int headerBufferPtr; /** Ogg Page count */ protected int pageCount; /** Opus packet count within an Ogg Page */ private int packetCount; /** * Absolute granule position * (the number of audio samples from beginning of file to end of Ogg Packet). */ private long granulepos; /** Frame size */ private int frameSize; public OpusWriter(){} /** * Setting up the OggOpus Writer * @param client */ public OpusWriter(ChuckWebSocketUploader client){ this.client = client; if (streamSerialNumber == 0) streamSerialNumber = new Random().nextInt(); dataBuffer = new byte[65565]; dataBufferPtr = 0; headerBuffer = new byte[255]; headerBufferPtr = 0; pageCount = 0; packetCount = 0; granulepos = 0; sampleRate = 16000; this.frameSize = SpeechConfiguration.FRAME_SIZE; } @Override public void close() throws IOException { Logger.d(TAG, "Opus Writer Closing..."); flush(true); this.closeFile(); this.client.stop(); } @Override public void open(File file) throws IOException {} @Override public void open(String filename) throws IOException {} @Override public void writeHeader(String comment) { this.createFile(); Logger.d(TAG, "Opus Writer Headering..."); byte[] header; byte[] data; int chksum; /* writes the OGG header page */ header = buildOggPageHeader(2, 0, streamSerialNumber, pageCount++, 1, new byte[] {19}); data = buildOpusHeader(sampleRate); chksum = OggCrc.checksum(0, header, 0, header.length); chksum = OggCrc.checksum(chksum, data, 0, data.length); writeInt(header, 22, chksum); this.write(header); this.write(data); /* Writes the OGG comment page */ header = buildOggPageHeader(0, 0, streamSerialNumber, pageCount++, 1, new byte[]{(byte) (comment.length() + 8)}); data = buildOpusComment(comment); chksum = OggCrc.checksum(0, header, 0, header.length); chksum = OggCrc.checksum(chksum, data, 0, data.length); writeInt(header, 22, chksum); this.write(header); this.write(data); } @Override public void writePacket(byte[] data, int offset, int len) throws IOException { // if nothing to write if (len <= 0) { return; } // System.out.println("PACKETS_PER_OGG_PAGE=" + PACKETS_PER_OGG_PAGE + ", packetCount=" + packetCount + ", granulepos=" + granulepos); if (packetCount > PACKETS_PER_OGG_PAGE) { flush(false); } System.arraycopy(data, offset, dataBuffer, dataBufferPtr, len); dataBufferPtr += len; headerBuffer[headerBufferPtr++]=(byte)len; packetCount++; granulepos += this.frameSize*2; } /** * Flush the Ogg page out of the buffers into the file. 
* @param eos - end of stream * @exception IOException */ protected void flush(final boolean eos) throws IOException{ int chksum; byte[] header; /* Writes the OGG header page */ header = buildOggPageHeader((eos ? 4 : 0), granulepos, streamSerialNumber, pageCount++, packetCount, headerBuffer); chksum = OggCrc.checksum(0, header, 0, header.length); chksum = OggCrc.checksum(chksum, dataBuffer, 0, dataBufferPtr); writeInt(header, 22, chksum); this.write(header); this.write(dataBuffer, 0, dataBufferPtr); dataBufferPtr = 0; headerBufferPtr = 0; packetCount = 0; } public void write(byte[] data){ Logger.d(TAG, "Opus Writer Writing...[" + data.length + "]"); this.writeFile(data); this.client.upload(data); } public void write(byte[] data, int offset, int count){ Logger.d(TAG, "Opus Writer Writing...["+data.length+", "+offset+", "+count+"]"); byte[] tmp = new byte[count]; System.arraycopy(data, offset, tmp, 0, count); this.writeFile(tmp); this.client.upload(tmp); } // ################# FOR TESTING PURPOSE ################# // File myFile = null; FileOutputStream fos = null; public String getBaseDir() { String baseDir = ""; if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { baseDir = Environment.getExternalStorageDirectory().getAbsolutePath() + "/"; } return baseDir; } private void createFile(){ myFile = new File(this.getBaseDir()+"WatsonR.opus"); try { myFile.deleteOnExit(); myFile.createNewFile(); fos = new FileOutputStream(myFile); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } private void writeFile(byte[] data){ try { fos.write(data); } catch (IOException e) { e.printStackTrace(); } } private void closeFile(){ try { fos.flush(); fos.close(); Logger.w(TAG, "Encoded file size=" + myFile.length()); } catch (IOException e) { e.printStackTrace(); } } }
Removed code for testing
speech-android-wrapper/src/main/java/com/ibm/cio/opus/OpusWriter.java
Removed code for testing
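The OpusWriter record above follows the standard Ogg page-buffering cadence: each packet's payload is appended to a data buffer, its length is recorded as one byte in a segment (lacing) table, and roughly every PACKETS_PER_OGG_PAGE packets both buffers are flushed out as a page. The sketch below mirrors only that cadence under simplifying assumptions: the real Ogg page header fields, the CRC computed via OggCrc above, and the splitting of packets of 255 bytes or more across several lacing values are all omitted, and every name here is illustrative.

import java.io.IOException;
import java.io.OutputStream;

class OggPageBufferDemo {
    static final int PACKETS_PER_PAGE = 50;
    private final OutputStream out;
    private final byte[] data = new byte[65536]; // packet payload buffer
    private final byte[] lacing = new byte[255]; // one length byte per packet
    private int dataPtr, lacingPtr, packetCount;

    OggPageBufferDemo(OutputStream out) { this.out = out; }

    void writePacket(byte[] packet, int offset, int len) throws IOException {
        if (len <= 0) return;                    // nothing to write
        if (packetCount >= PACKETS_PER_PAGE) flush();
        System.arraycopy(packet, offset, data, dataPtr, len);
        dataPtr += len;
        lacing[lacingPtr++] = (byte) len;        // assumes packets shorter than 255 bytes
        packetCount++;
    }

    void flush() throws IOException {
        // A real writer would first emit a full Ogg page header (capture
        // pattern, granule position, serial number, CRC), as OpusWriter does
        // with buildOggPageHeader and OggCrc.checksum above.
        out.write(lacing, 0, lacingPtr);
        out.write(data, 0, dataPtr);
        dataPtr = 0; lacingPtr = 0; packetCount = 0;
    }
}

The single length byte per packet works only because the encoded Opus frames here stay under 255 bytes; full Ogg lacing splits longer packets into multiple 255-byte segments.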
Java
apache-2.0
5ef844e108a9709688ad79738f835036f342b249
0
xtremelabs/xl-image_utils_lib-android
package com.xtremelabs.imageutils; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import android.util.Log; import com.xtremelabs.imageutils.NetworkRequestCreator.InputStreamListener; class ImageDownloader implements ImageNetworkInterface { @SuppressWarnings("unused") private static final String TAG = "DefaultImageDownloader"; private final NetworkToDiskInterface mNetworkToDiskInterface; private final ImageDownloadObserver mImageDownloadObserver; private final HashMap<String, ImageDownloadingRunnable> mUrlToRunnableMap = new HashMap<String, ImageDownloadingRunnable>(); private NetworkRequestCreator mNetworkRequestCreator = new DefaultNetworkRequestCreator(); /* * TODO: Research into lowering the number of available threads for the network */ private final LifoThreadPool mThreadPool = new LifoThreadPool(3); public ImageDownloader(NetworkToDiskInterface networkToDiskInterface, ImageDownloadObserver imageDownloadObserver) { mNetworkToDiskInterface = networkToDiskInterface; mImageDownloadObserver = imageDownloadObserver; } @Override public synchronized void bump(String url) { ImageDownloadingRunnable runnable = mUrlToRunnableMap.get(url); if (runnable != null) { mThreadPool.bump(runnable); } } @Override public synchronized void downloadImageToDisk(final String url) { ImageDownloadingRunnable runnable = new ImageDownloadingRunnable(url); if (!mUrlToRunnableMap.containsKey(url)) { mUrlToRunnableMap.put(url, runnable); mThreadPool.execute(runnable); } } @Override public synchronized void setNetworkRequestCreator(NetworkRequestCreator networkRequestCreator) { if (networkRequestCreator == null) { mNetworkRequestCreator = new DefaultNetworkRequestCreator(); } else { mNetworkRequestCreator = networkRequestCreator; } } private synchronized void removeUrlFromMap(String url) { mUrlToRunnableMap.remove(url); } class ImageDownloadingRunnable implements Runnable { private final String mUrl; public ImageDownloadingRunnable(String url) { mUrl = url; } @Override public void run() { mNetworkRequestCreator.getInputStream(mUrl, new InputStreamListener() { @Override public void onInputStreamReady(InputStream inputStream) { String errorMessage = loadInputStreamToDisk(inputStream); removeUrlFromMap(mUrl); if (errorMessage != null) { mImageDownloadObserver.onImageDownloadFailed(mUrl, errorMessage); } else { mImageDownloadObserver.onImageDownloaded(mUrl); } } @Override public void onFailure(String errorMessage) { removeUrlFromMap(mUrl); mImageDownloadObserver.onImageDownloadFailed(mUrl, errorMessage); } }); } private String loadInputStreamToDisk(InputStream inputStream) { String errorMessage = null; if (inputStream != null) { try { Log.d("ImageLoader", "Downloading image: " + mUrl); mNetworkToDiskInterface.downloadImageFromInputStream(mUrl, inputStream); } catch (IOException e) { errorMessage = "IOException when downloading image: " + mUrl + ", Exception type: " + e.getClass().getName() + ", Exception message: " + e.getMessage(); } catch (IllegalArgumentException e) { errorMessage = "Failed to download image with error message: " + e.getMessage(); } catch (IllegalStateException e) { /* * NOTE: If a bad URL is passed in (for example, mUrl = "N/A", the client.execute() call will throw an IllegalStateException. We do not want this exception to crash the app. Rather, we want to log the * error and report a failure. */ Log.w(AbstractImageLoader.TAG, "IMAGE LOAD FAILED - An error occurred while performing the network request for the image. Stack trace below. 
URL: " + mUrl); e.printStackTrace(); errorMessage = "Failed to download image. A stack trace has been output to the logs. Message: " + e.getMessage(); } finally { try { if (inputStream != null) { inputStream.close(); } } catch (IOException e) { } } } return errorMessage; } } @Override public synchronized boolean isNetworkRequestPendingForUrl(String url) { return mUrlToRunnableMap.containsKey(url); } }
xl_image_utils_lib/src/com/xtremelabs/imageutils/ImageDownloader.java
package com.xtremelabs.imageutils; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import android.util.Log; import com.xtremelabs.imageutils.NetworkRequestCreator.InputStreamListener; class ImageDownloader implements ImageNetworkInterface { @SuppressWarnings("unused") private static final String TAG = "DefaultImageDownloader"; private final NetworkToDiskInterface mNetworkToDiskInterface; private final ImageDownloadObserver mImageDownloadObserver; private final HashMap<String, ImageDownloadingRunnable> mUrlToRunnableMap = new HashMap<String, ImageDownloadingRunnable>(); private NetworkRequestCreator mNetworkRequestCreator = new DefaultNetworkRequestCreator(); /* * TODO: Research into lowering the number of available threads for the network */ private final LifoThreadPool mThreadPool = new LifoThreadPool(3); public ImageDownloader(NetworkToDiskInterface networkToDiskInterface, ImageDownloadObserver imageDownloadObserver) { mNetworkToDiskInterface = networkToDiskInterface; mImageDownloadObserver = imageDownloadObserver; } @Override public synchronized void bump(String url) { ImageDownloadingRunnable runnable = mUrlToRunnableMap.get(url); if (runnable != null) { mThreadPool.bump(runnable); } } @Override public synchronized void downloadImageToDisk(final String url) { ImageDownloadingRunnable runnable = new ImageDownloadingRunnable(url); if (!mUrlToRunnableMap.containsKey(url)) { mUrlToRunnableMap.put(url, runnable); mThreadPool.execute(runnable); } } @Override public synchronized void setNetworkRequestCreator(NetworkRequestCreator networkRequestCreator) { if (networkRequestCreator == null) { mNetworkRequestCreator = new DefaultNetworkRequestCreator(); } else { mNetworkRequestCreator = networkRequestCreator; } } private synchronized void removeUrlFromMap(String url) { mUrlToRunnableMap.remove(url); } class ImageDownloadingRunnable implements Runnable { private final String mUrl; public ImageDownloadingRunnable(String url) { mUrl = url; } @Override public void run() { mNetworkRequestCreator.getInputStream(mUrl, new InputStreamListener() { @Override public void onInputStreamReady(InputStream inputStream) { String errorMessage = loadInputStreamToDisk(inputStream); removeUrlFromMap(mUrl); if (errorMessage != null) { mImageDownloadObserver.onImageDownloadFailed(mUrl, errorMessage); } else { mImageDownloadObserver.onImageDownloaded(mUrl); } } @Override public void onFailure(String errorMessage) { removeUrlFromMap(mUrl); mImageDownloadObserver.onImageDownloadFailed(mUrl, errorMessage); } }); } private String loadInputStreamToDisk(InputStream inputStream) { String errorMessage = null; if (inputStream != null) { try { mNetworkToDiskInterface.downloadImageFromInputStream(mUrl, inputStream); } catch (IOException e) { errorMessage = "IOException when downloading image: " + mUrl + ", Exception type: " + e.getClass().getName() + ", Exception message: " + e.getMessage(); } catch (IllegalArgumentException e) { errorMessage = "Failed to download image with error message: " + e.getMessage(); } catch (IllegalStateException e) { /* * NOTE: If a bad URL is passed in (for example, mUrl = "N/A", the client.execute() call will throw an IllegalStateException. We do not want this exception to crash the app. Rather, we want to log the * error and report a failure. */ Log.w(AbstractImageLoader.TAG, "IMAGE LOAD FAILED - An error occurred while performing the network request for the image. Stack trace below. URL: " + mUrl); e.printStackTrace(); errorMessage = "Failed to download image. 
A stack trace has been output to the logs. Message: " + e.getMessage(); } finally { try { if (inputStream != null) { inputStream.close(); } } catch (IOException e) { } } } return errorMessage; } } @Override public synchronized boolean isNetworkRequestPendingForUrl(String url) { return mUrlToRunnableMap.containsKey(url); } }
Jamie H - Added a log to find out what images have been requested for download.
xl_image_utils_lib/src/com/xtremelabs/imageutils/ImageDownloader.java
Jamie H - Added a log to find out what images have been requested for download.
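Beyond the added log line, the ImageDownloader record above is a compact example of request coalescing: a synchronized HashMap keyed by URL guarantees at most one in-flight download per image, and the entry is cleared on success or failure so the URL can be requested again. A stripped-down sketch of that pattern follows; the class and method names are made up for illustration and are not the library's API.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

class DownloadCoalescer {
    private final Map<String, Runnable> inFlight = new HashMap<>();
    private final ExecutorService pool = Executors.newFixedThreadPool(3);

    synchronized void download(String url) {
        if (inFlight.containsKey(url)) return; // already queued or running
        Runnable task = () -> {
            try {
                fetch(url);      // placeholder for the real network + disk write
            } finally {
                remove(url);     // always release the slot, success or failure
            }
        };
        inFlight.put(url, task);
        pool.execute(task);
    }

    private synchronized void remove(String url) { inFlight.remove(url); }

    private void fetch(String url) { /* download body elided */ }

    synchronized boolean isPending(String url) { return inFlight.containsKey(url); }
}

The original removes the map entry inside its stream callbacks; the try/finally here is a slightly more defensive equivalent of the same cleanup.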
Java
apache-2.0
80a0e047fb75dfc058ca1a78be1e5bb6c4de1766
0
zibhub/GNDMS,zibhub/GNDMS,zibhub/GNDMS,zibhub/GNDMS
package de.zib.gndms.stuff.confuror; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.jetbrains.annotations.NotNull; import org.testng.Assert; import org.testng.annotations.*; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; public class ConfigTreeTest { private volatile ConfigTree tree; private final @NotNull ObjectMapper objectMapper; private final @NotNull JsonFactory factory; private final @NotNull ConfigEditor.Visitor visitor; { objectMapper = new ObjectMapper(); factory = objectMapper.getJsonFactory(); visitor = new ConfigEditor.DefaultVisitor(); } @BeforeMethod public void setup() { tree = new ConfigTree(); tree.setObjectMapper(objectMapper); objectMapper.disableDefaultTyping(); } @SuppressWarnings({"UnusedDeclaration"}) String toSingle(String singleJson) { return singleJson.replace('"', '\''); } String toDouble(String singleJson) { return singleJson.replace('\'', '"'); } JsonNode parseDouble(String input) throws IOException { return factory.createJsonParser(input).readValueAsTree(); } JsonNode parseSingle(String single) throws IOException { return parseDouble(toDouble(single)); } @Test public void replaceEmptyRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode update = parseDouble("12"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isValueNode()); Assert.assertTrue(snapshot.equals(update)); } @Test public void replaceObjectRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode update = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(update)); } @Test public void appendBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4 }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { 'y': 2 } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': 2 }, 'b': 4 }"))); } @Test public void appendDeepBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7 } }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { 'd': 12 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7, 'd': 12 } }, 'b': 4 }"))); } @Test(expectedExceptions = { ConfigEditor.UpdateRejectedException.class }) public void errorDeepBelowRoot1() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': [] }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { 'd': 12 } } }"); tree.update(editor, update); } @Test(expectedExceptions = { ConfigEditor.UpdateRejectedException.class }) public void errorDeepBelowRoot2() throws IOException, 
ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7 } }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { '+d': 12 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7, 'd': 12 } }, 'b': 4 }"))); } @Test public void updateDeepBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7, 'k': 4 } }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { '-d': null, 'c': 8 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': { 'k': 4, 'c': 8 } }, 'b': 4 }"))); } @Test public void deleteBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '-a': null }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'b': 4 }"))); } @Test public void deleteMissingBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '-c': null }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(init)); } @Test public void updateRootObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'a': 14 }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 14, 'b': 4 }"))); } @Test public void updateRootArray() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("[ 'a', 'b' ]"); tree.update(editor, init); JsonNode update = parseSingle("[ 'a', 'x' ]"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isArray()); Assert.assertTrue(snapshot.equals(update)); } @Test public void updateNestedObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': 2 } }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'b': { 'y' : 4 } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b': { 'y': 4 } }"))); } @Test public void addNestedObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': 2 } }"); 
tree.update(editor, init); JsonNode update = parseSingle("{ 'c': { 'y' : 4 } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b' : { 'x': 2 }, 'c': { 'y': 4 } }"))); } @Test public void updateDeeplyNestedObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': { 'c' : 4 } } }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'b': { 'x' : { 'c' : 7 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b': { 'x': { 'c' : 7 } } }"))); } @Test public void updateNestedArray() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': [ 2, 3, 4 ] }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'b': [ 2, 5, 7 ] }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b': [ 2, 5, 7 ] }"))); } @Test public void pathTest() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': { 'c' : 4 } } }"); tree.update(editor, init); final AtomicReference<Object[]> ref = new AtomicReference<Object[]>(null); ConfigEditor reportingEditor = tree.newUpdater(new ConfigEditor.Visitor() { public ObjectMapper getObjectMapper() { return objectMapper; } public void updateNode(@NotNull ConfigEditor.Update updater) { ref.getAndSet(updater.getPath()); updater.accept(); } }); tree.update(reportingEditor, parseSingle("{ '+b': { '+x': { 'q': 5 } } }")); final Object[] result = ref.get(); Assert.assertTrue(result.length == 3); Assert.assertTrue(result[0].equals("b")); Assert.assertTrue(result[1].equals("x")); Assert.assertTrue(result[2].equals("q")); } }
stuff/test-src/de/zib/gndms/stuff/confuror/ConfigTreeTest.java
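ConfigTreeTest above pins down a small JSON update convention: a key written as "+name" merges recursively into an existing object child (and is rejected when that child is missing or not an object), "-name" deletes the child (a no-op if absent), and a plain key replaces the child wholesale. The sketch below reimplements just those semantics so they can be read in one place; it is not the actual ConfigTree implementation, and it is written against the current com.fasterxml Jackson 2.x API rather than the org.codehaus Jackson 1.x API the tests use.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.Iterator;
import java.util.Map;

class MergeSemanticsDemo {
    // Applies the '+'/'-' update convention to 'target':
    //   "+key" -> recurse into an existing object child,
    //   "-key" -> remove the child (missing keys are ignored),
    //   "key"  -> replace the child outright.
    static void apply(ObjectNode target, ObjectNode update) {
        Iterator<Map.Entry<String, JsonNode>> fields = update.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> e = fields.next();
            String key = e.getKey();
            if (key.startsWith("+")) {
                String real = key.substring(1);
                JsonNode child = target.get(real);
                // Mirrors the UpdateRejectedException cases in the tests:
                // '+' is only valid when both sides are objects.
                if (child == null || !child.isObject() || !e.getValue().isObject())
                    throw new IllegalArgumentException("update rejected at " + real);
                apply((ObjectNode) child, (ObjectNode) e.getValue());
            } else if (key.startsWith("-")) {
                target.remove(key.substring(1));
            } else {
                target.set(key, e.getValue());
            }
        }
    }
}

Applied to { "a": { "x": 4 }, "b": 4 }, the update { "+a": { "y": 2 } } yields { "a": { "x": 4, "y": 2 }, "b": 4 }, matching the appendBelowRoot test above.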
package de.zib.gndms.stuff.confuror; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.jetbrains.annotations.NotNull; import org.testng.Assert; import org.testng.annotations.*; import java.io.IOException; public class ConfigTreeTest { private volatile ConfigTree tree; private final @NotNull ObjectMapper objectMapper; private final @NotNull JsonFactory factory; private final @NotNull ConfigEditor.Visitor visitor; { objectMapper = new ObjectMapper(); factory = objectMapper.getJsonFactory(); visitor = new ConfigEditor.DefaultVisitor(); } @BeforeMethod public void setup() { tree = new ConfigTree(); tree.setObjectMapper(objectMapper); objectMapper.disableDefaultTyping(); } @SuppressWarnings({"UnusedDeclaration"}) String toSingle(String singleJson) { return singleJson.replace('"', '\''); } String toDouble(String singleJson) { return singleJson.replace('\'', '"'); } JsonNode parseDouble(String input) throws IOException { return factory.createJsonParser(input).readValueAsTree(); } JsonNode parseSingle(String single) throws IOException { return parseDouble(toDouble(single)); } @Test public void replaceEmptyRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode update = parseDouble("12"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isValueNode()); Assert.assertTrue(snapshot.equals(update)); } @Test public void replaceObjectRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode update = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(update)); } @Test public void appendBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4 }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { 'y': 2 } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': 2 }, 'b': 4 }"))); } @Test public void appendDeepBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7 } }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { 'd': 12 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7, 'd': 12 } }, 'b': 4 }"))); } @Test(expectedExceptions = { ConfigEditor.UpdateRejectedException.class }) public void errorDeepBelowRoot1() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': [] }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { 'd': 12 } } }"); tree.update(editor, update); } @Test(expectedExceptions = { ConfigEditor.UpdateRejectedException.class }) public void errorDeepBelowRoot2() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); 
JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7 } }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { '+d': 12 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7, 'd': 12 } }, 'b': 4 }"))); } @Test public void updateDeepBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': { 'x': 4, 'y': { 'c': 7, 'k': 4 } }, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '+a': { '+y': { '-d': null, 'c': 8 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': { 'x': 4, 'y': { 'k': 4, 'c': 8 } }, 'b': 4 }"))); } @Test public void deleteBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '-a': null }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'b': 4 }"))); } @Test public void deleteMissingBelowRoot() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ '-c': null }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(init)); } @Test public void updateRootObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': 4 }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'a': 14 }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 14, 'b': 4 }"))); } @Test public void updateRootArray() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("[ 'a', 'b' ]"); tree.update(editor, init); JsonNode update = parseSingle("[ 'a', 'x' ]"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isArray()); Assert.assertTrue(snapshot.equals(update)); } @Test public void updateNestedObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': 2 } }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'b': { 'y' : 4 } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b': { 'y': 4 } }"))); } @Test public void addNestedObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': 2 } }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'c': { 'y' : 4 } }"); tree.update(editor, 
update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b' : { 'x': 2 }, 'c': { 'y': 4 } }"))); } @Test public void updateDeeplyNestedObject() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': { 'x': { 'c' : 4 } } }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'b': { 'x' : { 'c' : 7 } } }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b': { 'x': { 'c' : 7 } } }"))); } @Test public void updateNestedArray() throws IOException, ConfigEditor.UpdateRejectedException { ConfigEditor editor = tree.newUpdater(visitor); JsonNode init = parseSingle("{ 'a': 12, 'b': [ 2, 3, 4 ] }"); tree.update(editor, init); JsonNode update = parseSingle("{ 'b': [ 2, 5, 7 ] }"); tree.update(editor, update); final JsonNode snapshot = tree.getSnapshotAsNode(); Assert.assertTrue(snapshot.isObject()); Assert.assertTrue(snapshot.equals(parseSingle("{ 'a': 12, 'b': [ 2, 5, 7 ] }"))); } }
Added new confuror test for getPath
stuff/test-src/de/zib/gndms/stuff/confuror/ConfigTreeTest.java
Added new confuror test for getPath
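The ConfigTreeTest record above exercises a three-operator update language over JSON configuration trees: a plain key replaces a subtree, a '+'-prefixed key merges into an existing object node, and a '-'-prefixed key deletes a child. A minimal sketch, assuming the ConfigTree/ConfigEditor API exactly as the tests use it; the parse helper mirrors the tests' parseSingle, with single quotes standing in for double quotes:

package de.zib.gndms.stuff.confuror;

import java.io.IOException;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class ConfigTreeDemo {
    public static void main(String[] args)
            throws IOException, ConfigEditor.UpdateRejectedException {
        ObjectMapper mapper = new ObjectMapper();
        JsonFactory factory = mapper.getJsonFactory();
        mapper.disableDefaultTyping();

        ConfigTree tree = new ConfigTree();
        tree.setObjectMapper(mapper);
        ConfigEditor editor = tree.newUpdater(new ConfigEditor.DefaultVisitor());

        tree.update(editor, parse(factory, "{ 'a': { 'x': 4 }, 'b': 4 }")); // replace the root
        tree.update(editor, parse(factory, "{ '+a': { 'y': 2 } }"));        // merge into 'a'
        tree.update(editor, parse(factory, "{ '-b': null }"));              // delete 'b'
        System.out.println(tree.getSnapshotAsNode()); // {"a":{"x":4,"y":2}}
    }

    // mirrors parseSingle() in the tests: single quotes stand in for double quotes
    private static JsonNode parse(JsonFactory factory, String single) throws IOException {
        return factory.createJsonParser(single.replace('\'', '"')).readValueAsTree();
    }
}

Each update mirrors one of the passing tests above; structurally invalid updates (such as merging an object into an array) throw ConfigEditor.UpdateRejectedException, as the error tests show.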
Java
apache-2.0
ae764730b59e2512888210ae8702074fb8e394c5
0
apache/lenya,apache/lenya,apache/lenya,apache/lenya
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.lenya.cms.cocoon.source; import java.io.IOException; import java.net.MalformedURLException; import java.util.Map; import org.apache.avalon.framework.configuration.Configurable; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.avalon.framework.context.Context; import org.apache.avalon.framework.context.ContextException; import org.apache.avalon.framework.context.Contextualizable; import org.apache.avalon.framework.logger.AbstractLogEnabled; import org.apache.avalon.framework.service.ServiceException; import org.apache.avalon.framework.service.ServiceManager; import org.apache.avalon.framework.service.Serviceable; import org.apache.avalon.framework.thread.ThreadSafe; import org.apache.cocoon.components.ContextHelper; import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.Request; import org.apache.excalibur.source.Source; import org.apache.excalibur.source.SourceException; import org.apache.excalibur.source.SourceFactory; import org.apache.lenya.cms.publication.Document; import org.apache.lenya.cms.publication.DocumentBuildException; import org.apache.lenya.cms.publication.DocumentFactory; import org.apache.lenya.cms.publication.DocumentUtil; import org.apache.lenya.cms.publication.Publication; import org.apache.lenya.cms.publication.PublicationException; import org.apache.lenya.cms.publication.PublicationUtil; import org.apache.lenya.cms.publication.URLInformation; import org.apache.lenya.cms.repository.RepositoryException; import org.apache.lenya.cms.repository.RepositoryUtil; import org.apache.lenya.cms.repository.Session; import org.apache.lenya.util.ServletHelper; /** * A factory for the "lenyadoc" scheme (virtual protocol), which is used to resolve any * src="lenyadoc:<...>" attributes in sitemaps. * * <code>lenyadoc://<publication>/<area>/<language>/<uuid></code> * <code>lenyadoc:/<language>/<uuid></code> * * If we want to request the meta data for a document * instead of the document itself, we need to use * * <code>lenyadoc:meta:/<language>/<uuid></code> * <code>lenyadoc:meta://<publication>/<area>/<language>/<uuid></code> * * @version $Id:$ * @deprecated Use <code>lenya-document</code> instead (see {@link org.apache.lenya.cms.cocoon.source.DocumentSourceFactory}. */ public class LenyaDocSourceFactory extends AbstractLogEnabled implements SourceFactory, ThreadSafe, Contextualizable, Serviceable, Configurable { protected static final String SCHEME = "lenyadoc"; private Context context; private ServiceManager manager; /** * Used for resolving the object model. 
* @see org.apache.avalon.framework.context.Contextualizable#contextualize(org.apache.avalon.framework.context.Context) */ public void contextualize(Context context) throws ContextException { this.context = context; } /** * @see org.apache.avalon.framework.service.Serviceable#service(org.apache.avalon.framework.service.ServiceManager) */ public void service(ServiceManager manager) throws ServiceException { this.manager = manager; } /** * @see org.apache.avalon.framework.configuration.Configurable#configure(org.apache.avalon.framework.configuration.Configuration) */ public void configure(Configuration configuration) throws ConfigurationException { } /** * @see org.apache.excalibur.source.SourceFactory#getSource(java.lang.String, java.util.Map) */ public Source getSource(String location, Map parameters) throws MalformedURLException, IOException, SourceException { String scheme = null; String area = null; String language = null; String uuid = null; Publication pub; // Parse the url int start = 0; int end; // Scheme end = location.indexOf(':', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: can not find scheme part [" + location + "]"); } scheme = location.substring(start, end); if (!SCHEME.equals(scheme)) { throw new MalformedURLException("Malformed lenyadoc: URI: unknown scheme [" + location + "]"); } Map objectModel = ContextHelper.getObjectModel(this.context); Request request = ObjectModelHelper.getRequest(objectModel); DocumentFactory factory = DocumentUtil.getDocumentFactory(this.manager, request); start = end + 1; // Absolute vs. relative if (location.startsWith("//", start)) { // Absolute: get publication id start += 2; end = location.indexOf('/', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: publication part not found [" + location + "]"); } String publicationId = location.substring(start, end); try { pub = factory.getPublication(publicationId); } catch (PublicationException e) { throw new MalformedURLException("Malformed lenyadoc: Publication [" + publicationId + "] does not exist or could not be initialized"); } if (pub == null || !pub.exists()) { throw new SourceException("The publication [" + publicationId + "] does not exist!"); } // Area start = end + 1; end = location.indexOf('/', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: cannot find area [" + location + "]"); } area = location.substring(start, end); } else if (location.startsWith("/", start)) { end += 1; // Relative: get publication id and area from page envelope try { pub = PublicationUtil.getPublication(this.manager, objectModel); } catch (PublicationException e) { throw new SourceException("Error getting publication id / area from page envelope [" + location + "]"); } if (pub != null && pub.exists()) { String url = ServletHelper.getWebappURI(request); area = new URLInformation(url).getArea(); } else { throw new SourceException("Error getting publication id / area from page envelope [" + location + "]"); } } else { throw new MalformedURLException("Malformed lenyadoc: URI [" + location + "]"); } // Language start = end + 1; end = location.indexOf('/', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: cannot find language [" + location + "]"); } language = location.substring(start, end); // UUID start = end + 1; uuid = location.substring(start); Session session; try { session = RepositoryUtil.getSession(this.manager, request); } catch (RepositoryException e1) { throw new 
RuntimeException(e1); } if (getLogger().isDebugEnabled()) { getLogger().debug("Creating repository source for URI [" + location + "]"); } Document document; try { document = factory.get(pub, area, uuid, language); } catch (DocumentBuildException e) { throw new MalformedURLException("Malformed lenyadoc: Document [" + uuid + ":" + language + "] could not be created."); } String lenyaURL = document.getSourceURI(); if (getLogger().isDebugEnabled()) { getLogger().debug("Mapping 'lenyadoc:' URL [" + location + "] to 'lenya:' URL [" + lenyaURL + "]"); getLogger().debug("Creating repository source for URI [" + lenyaURL + "]"); } return new RepositorySource(manager, lenyaURL, session, getLogger()); } /** * @see org.apache.excalibur.source.SourceFactory#release(org.apache.excalibur.source.Source) */ public void release(Source source) { // Source will be released by delegated source factory. } }
src/modules/lenyadoc/java/src/org/apache/lenya/cms/cocoon/source/LenyaDocSourceFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.lenya.cms.cocoon.source; import java.io.IOException; import java.net.MalformedURLException; import java.util.Map; import org.apache.avalon.framework.configuration.Configurable; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.avalon.framework.context.Context; import org.apache.avalon.framework.context.ContextException; import org.apache.avalon.framework.context.Contextualizable; import org.apache.avalon.framework.logger.AbstractLogEnabled; import org.apache.avalon.framework.service.ServiceException; import org.apache.avalon.framework.service.ServiceManager; import org.apache.avalon.framework.service.Serviceable; import org.apache.avalon.framework.thread.ThreadSafe; import org.apache.cocoon.components.ContextHelper; import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.Request; import org.apache.excalibur.source.Source; import org.apache.excalibur.source.SourceException; import org.apache.excalibur.source.SourceFactory; import org.apache.lenya.cms.publication.Document; import org.apache.lenya.cms.publication.DocumentBuildException; import org.apache.lenya.cms.publication.DocumentFactory; import org.apache.lenya.cms.publication.DocumentUtil; import org.apache.lenya.cms.publication.Publication; import org.apache.lenya.cms.publication.PublicationException; import org.apache.lenya.cms.publication.PublicationUtil; import org.apache.lenya.cms.publication.URLInformation; import org.apache.lenya.cms.repository.RepositoryException; import org.apache.lenya.cms.repository.RepositoryUtil; import org.apache.lenya.cms.repository.Session; import org.apache.lenya.util.ServletHelper; /** * A factory for the "lenyadoc" scheme (virtual protocol), which is used to resolve any * src="lenyadoc:<...>" attributes in sitemaps. * * <code>lenyadoc://<publication>/<area>/<language>/<uuid></code> * <code>lenyadoc:/<language>/<uuid></code> * * If we want to request the meta data for a document * instead of the document itself, we need to use * * <code>lenyadoc:meta:/<language>/<uuid></code> * <code>lenyadoc:meta://<publication>/<area>/<language>/<uuid></code> * * @version $Id:$ */ public class LenyaDocSourceFactory extends AbstractLogEnabled implements SourceFactory, ThreadSafe, Contextualizable, Serviceable, Configurable { protected static final String SCHEME = "lenyadoc"; private Context context; private ServiceManager manager; /** * Used for resolving the object model. 
* @see org.apache.avalon.framework.context.Contextualizable#contextualize(org.apache.avalon.framework.context.Context) */ public void contextualize(Context context) throws ContextException { this.context = context; } /** * @see org.apache.avalon.framework.service.Serviceable#service(org.apache.avalon.framework.service.ServiceManager) */ public void service(ServiceManager manager) throws ServiceException { this.manager = manager; } /** * @see org.apache.avalon.framework.configuration.Configurable#configure(org.apache.avalon.framework.configuration.Configuration) */ public void configure(Configuration configuration) throws ConfigurationException { } /** * @see org.apache.excalibur.source.SourceFactory#getSource(java.lang.String, java.util.Map) */ public Source getSource(String location, Map parameters) throws MalformedURLException, IOException, SourceException { String scheme = null; String area = null; String language = null; String uuid = null; Publication pub; // Parse the url int start = 0; int end; // Scheme end = location.indexOf(':', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: can not find scheme part [" + location + "]"); } scheme = location.substring(start, end); if (!SCHEME.equals(scheme)) { throw new MalformedURLException("Malformed lenyadoc: URI: unknown scheme [" + location + "]"); } Map objectModel = ContextHelper.getObjectModel(this.context); Request request = ObjectModelHelper.getRequest(objectModel); DocumentFactory factory = DocumentUtil.getDocumentFactory(this.manager, request); start = end + 1; // Absolute vs. relative if (location.startsWith("//", start)) { // Absolute: get publication id start += 2; end = location.indexOf('/', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: publication part not found [" + location + "]"); } String publicationId = location.substring(start, end); try { pub = factory.getPublication(publicationId); } catch (PublicationException e) { throw new MalformedURLException("Malformed lenyadoc: Publication [" + publicationId + "] does not exist or could not be initialized"); } if (pub == null || !pub.exists()) { throw new SourceException("The publication [" + publicationId + "] does not exist!"); } // Area start = end + 1; end = location.indexOf('/', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: cannot find area [" + location + "]"); } area = location.substring(start, end); } else if (location.startsWith("/", start)) { end += 1; // Relative: get publication id and area from page envelope try { pub = PublicationUtil.getPublication(this.manager, objectModel); } catch (PublicationException e) { throw new SourceException("Error getting publication id / area from page envelope [" + location + "]"); } if (pub != null && pub.exists()) { String url = ServletHelper.getWebappURI(request); area = new URLInformation(url).getArea(); } else { throw new SourceException("Error getting publication id / area from page envelope [" + location + "]"); } } else { throw new MalformedURLException("Malformed lenyadoc: URI [" + location + "]"); } // Language start = end + 1; end = location.indexOf('/', start); if (end == -1) { throw new MalformedURLException("Malformed lenyadoc: URI: cannot find language [" + location + "]"); } language = location.substring(start, end); // UUID start = end + 1; uuid = location.substring(start); Session session; try { session = RepositoryUtil.getSession(this.manager, request); } catch (RepositoryException e1) { throw new 
RuntimeException(e1); } if (getLogger().isDebugEnabled()) { getLogger().debug("Creating repository source for URI [" + location + "]"); } Document document; try { document = factory.get(pub, area, uuid, language); } catch (DocumentBuildException e) { throw new MalformedURLException("Malformed lenyadoc: Document [" + uuid + ":" + language + "] could not be created."); } String lenyaURL = document.getSourceURI(); if (getLogger().isDebugEnabled()) { getLogger().debug("Mapping 'lenyadoc:' URL [" + location + "] to 'lenya:' URL [" + lenyaURL + "]"); getLogger().debug("Creating repository source for URI [" + lenyaURL + "]"); } return new RepositorySource(manager, lenyaURL, session, getLogger()); } /** * @see org.apache.excalibur.source.SourceFactory#release(org.apache.excalibur.source.Source) */ public void release(Source source) { // Source will be released by delegated source factory. } }
Deprecating lenyadoc source factory in favor of lenya-document: protocol. git-svn-id: c334bb69c16d150e1b06e84516f7aa90b3181ca2@641518 13f79535-47bb-0310-9956-ffa450edef68
src/modules/lenyadoc/java/src/org/apache/lenya/cms/cocoon/source/LenyaDocSourceFactory.java
Deprecating lenyadoc source factory in favor of lenya-document: protocol.
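The factory's Javadoc documents two URI forms, and getSource() walks them with successive indexOf('/') calls. As a standalone illustration of the absolute form, using a made-up publication id and uuid, the same decomposition reduces to:

public class LenyaDocUriDemo {
    public static void main(String[] args) {
        String location = "lenyadoc://default/authoring/en/0e4ab2cc-0000-0000-0000-000000000000";
        int start = location.indexOf(':') + 1; // skip the "lenyadoc" scheme
        start += 2;                            // skip "//" (absolute form)
        int end = location.indexOf('/', start);
        String publication = location.substring(start, end);
        start = end + 1;
        end = location.indexOf('/', start);
        String area = location.substring(start, end);
        start = end + 1;
        end = location.indexOf('/', start);
        String language = location.substring(start, end);
        String uuid = location.substring(end + 1);
        // prints: default / authoring / en / 0e4ab2cc-0000-0000-0000-000000000000
        System.out.println(publication + " / " + area + " / " + language + " / " + uuid);
    }
}

The factory then resolves the Document, maps it to its lenya: source URI, and hands off to RepositorySource; the relative form instead pulls publication and area from the page envelope.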
Java
apache-2.0
ccaf930d243c60a4cc0d5de0823624afbf625fcb
0
wikipathways/GPML2RDF,mkutmon/GPML2RDF
// WP2RDF // Conversion from GPML pathways to RDF // Copyright 2015 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.wikipathways.wp2rdf; import org.pathvisio.core.model.MGroup; import org.pathvisio.core.model.MLine; import org.pathvisio.core.model.ObjectType; import org.pathvisio.core.model.Pathway; import org.pathvisio.core.model.PathwayElement; import org.wikipathways.wp2rdf.converter.DataNodeConverter; import org.wikipathways.wp2rdf.converter.GraphicalLineConverter; import org.wikipathways.wp2rdf.converter.GroupConverter; import org.wikipathways.wp2rdf.converter.InfoBoxConverter; import org.wikipathways.wp2rdf.converter.InteractionConverter; import org.wikipathways.wp2rdf.converter.LabelConverter; import org.wikipathways.wp2rdf.converter.PathwayConverter; import org.wikipathways.wp2rdf.converter.ShapeConverter; import org.wikipathways.wp2rdf.converter.StateConverter; import org.wikipathways.wp2rdf.utils.DataHandlerGpml; import org.wikipathways.wp2rdf.utils.DataHandlerWp; import org.wikipathways.wp2rdf.utils.Utils; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; /** * Class that converts a pathway * in a RDF model - only GPML vocabulary * * This work is based on Andra Waagmeester's work. 
* * @author mkutmon * @author ryanmiller * */ public class GpmlConverter { public static Model convertGpml(Pathway p, String wpId, String revision) { Model pathwayModel = ModelFactory.createDefaultModel(); Utils.setModelPrefix(pathwayModel); convertGpml(p, wpId, revision, pathwayModel); return pathwayModel; } public static void convertGpml(Pathway p, String wpId, String revision, Model pathwayModel) { Resource pathwayRes = PathwayConverter.parsePathwayInfoGpml(p, wpId, revision, pathwayModel); DataHandlerGpml data = new DataHandlerGpml(p, pathwayRes); for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.DATANODE)) { DataNodeConverter.parseDataNodesGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.STATE)) { StateConverter.parseStateGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.LINE)) { InteractionConverter.parseInteractionGpml((MLine) e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.GRAPHLINE)) { GraphicalLineConverter.parseInteractionGpml((MLine) e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.LABEL)) { LabelConverter.parseLabelGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.SHAPE)) { ShapeConverter.parseShapeGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.GROUP)) { GroupConverter.parseGroupGpml((MGroup) e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.INFOBOX)) { InfoBoxConverter.parseInfoBoxGpml(e, pathwayModel, data); } } } public static Model convertWp(Pathway p, String wpId, String revision) { Model pathwayModel = ModelFactory.createDefaultModel(); Utils.setModelPrefix(pathwayModel); convertWp(p, wpId, revision, pathwayModel); return pathwayModel; } public static void convertWp(Pathway p, String wpId, String revision, Model pathwayModel) { Resource pathwayRes = PathwayConverter.parsePathwayInfoWp(p, wpId, revision, pathwayModel); DataHandlerWp data = new DataHandlerWp(p, wpId, revision, pathwayRes); for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.DATANODE)) { DataNodeConverter.parseDataNodeWp(e, pathwayModel, data, p); } } for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.GROUP)) { GroupConverter.parseComplexWp((MGroup) e, pathwayModel, data); } } for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.LINE)) { InteractionConverter.parseInteractionWp((MLine)e, pathwayModel, data); } } } }
WP2RDF/src/main/java/org/wikipathways/wp2rdf/GpmlConverter.java
// WP2RDF // Conversion from GPML pathways to RDF // Copyright 2015 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.wikipathways.wp2rdf; import org.pathvisio.core.model.MGroup; import org.pathvisio.core.model.MLine; import org.pathvisio.core.model.ObjectType; import org.pathvisio.core.model.Pathway; import org.pathvisio.core.model.PathwayElement; import org.wikipathways.wp2rdf.converter.DataNodeConverter; import org.wikipathways.wp2rdf.converter.GraphicalLineConverter; import org.wikipathways.wp2rdf.converter.GroupConverter; import org.wikipathways.wp2rdf.converter.InfoBoxConverter; import org.wikipathways.wp2rdf.converter.InteractionConverter; import org.wikipathways.wp2rdf.converter.LabelConverter; import org.wikipathways.wp2rdf.converter.PathwayConverter; import org.wikipathways.wp2rdf.converter.ShapeConverter; import org.wikipathways.wp2rdf.converter.StateConverter; import org.wikipathways.wp2rdf.utils.DataHandlerGpml; import org.wikipathways.wp2rdf.utils.DataHandlerWp; import org.wikipathways.wp2rdf.utils.Utils; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; /** * Class that converts a pathway * in a RDF model - only GPML vocabulary * * This work is based on Andra Waagmeester's work. 
* * @author mkutmon * @author ryanmiller * */ public class GpmlConverter { public static Model convertGpml(Pathway p, String wpId, String revision) { Model pathwayModel = ModelFactory.createDefaultModel(); Utils.setModelPrefix(pathwayModel); convertGpml(p, wpId, revision, pathwayModel); return pathwayModel; } public static void convertGpml(Pathway p, String wpId, String revision, Model pathwayModel) { Resource pathwayRes = PathwayConverter.parsePathwayInfoGpml(p, wpId, revision, pathwayModel); DataHandlerGpml data = new DataHandlerGpml(p, pathwayRes); for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.DATANODE)) { DataNodeConverter.parseDataNodesGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.STATE)) { StateConverter.parseStateGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.LINE)) { InteractionConverter.parseInteractionGpml((MLine) e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.GRAPHLINE)) { GraphicalLineConverter.parseInteractionGpml((MLine) e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.LABEL)) { LabelConverter.parseLabelGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.SHAPE)) { ShapeConverter.parseShapeGpml(e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.GROUP)) { GroupConverter.parseGroupGpml((MGroup) e, pathwayModel, data); } else if(e.getObjectType().equals(ObjectType.INFOBOX)) { InfoBoxConverter.parseInfoBoxGpml(e, pathwayModel, data); } } } public static Model convertWp(Pathway p, String wpId, String revision) { Model pathwayModel = ModelFactory.createDefaultModel(); Utils.setModelPrefix(pathwayModel); convertWp(p, wpId, revision, pathwayModel); return pathwayModel; } public static void convertWp(Pathway p, String wpId, String revision, Model pathwayModel) { Resource pathwayRes = PathwayConverter.parsePathwayInfoWp(p, wpId, revision, pathwayModel); DataHandlerWp data = new DataHandlerWp(p, wpId, revision, pathwayRes); for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.DATANODE)) { DataNodeConverter.parseDataNodeWp(e, pathwayModel, data); } } for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.GROUP)) { GroupConverter.parseComplexWp((MGroup) e, pathwayModel, data); } } for(PathwayElement e : p.getDataObjects()) { if(e.getObjectType().equals(ObjectType.LINE)) { InteractionConverter.parseInteractionWp((MLine)e, pathwayModel, data); } } } }
added bdb mappings
WP2RDF/src/main/java/org/wikipathways/wp2rdf/GpmlConverter.java
added bdb mappings
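GpmlConverter above is a pure dispatcher: convertGpml() walks every PathwayElement and routes it to a converter by ObjectType, while convertWp() makes three passes (data nodes, then groups, then lines), presumably so interactions can refer to already-converted participants. A minimal usage sketch, assuming a GPML file on disk and PathVisio's readFromXml loader; the file name and WP id/revision are placeholders:

import java.io.File;
import org.pathvisio.core.model.Pathway;
import org.wikipathways.wp2rdf.GpmlConverter;
import com.hp.hpl.jena.rdf.model.Model;

public class ConvertOne {
    public static void main(String[] args) throws Exception {
        Pathway pathway = new Pathway();
        pathway.readFromXml(new File("WP1.gpml"), true); // validate while reading
        Model gpmlModel = GpmlConverter.convertGpml(pathway, "WP1", "12345");
        gpmlModel.write(System.out, "TURTLE");           // dump the GPML-vocabulary graph
    }
}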
Java
apache-2.0
0f4721fc10c15bd5c1a8269310f49ba7887df338
0
RedRoma/aroma-data-operations,AromaTech/banana-data-operations,RedRoma/aroma-data-operations,RedRoma/banana-data-operations
/* * Copyright 2016 Aroma Tech. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.aroma.banana.data.memory; import com.google.common.base.Objects; import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import sir.wellington.alchemy.collections.lists.Lists; import sir.wellington.alchemy.collections.maps.Maps; import tech.aroma.banana.data.InboxRepository; import tech.aroma.banana.thrift.Message; import tech.aroma.banana.thrift.User; import tech.aroma.banana.thrift.exceptions.InvalidArgumentException; import tech.sirwellington.alchemy.annotations.arguments.Required; import static tech.aroma.banana.data.assertions.RequestAssertions.validMessage; import static tech.aroma.banana.data.assertions.RequestAssertions.validUser; import static tech.sirwellington.alchemy.arguments.Arguments.checkThat; import static tech.sirwellington.alchemy.arguments.assertions.StringAssertions.nonEmptyString; /** * * @author SirWellington */ final class MemoryInboxRepository implements InboxRepository { private final static Logger LOG = LoggerFactory.getLogger(MemoryInboxRepository.class); private final Map<String, List<Message>> messagesForUser = Maps.createSynchronized(); @Override public void saveMessageForUser(@Required User user, @Required Message message) throws TException { checkThat(message) .throwing(InvalidArgumentException.class) .is(validMessage()); checkThat(user) .throwing(InvalidArgumentException.class) .is(validUser()); String userId = user.userId; List<Message> messages = messagesForUser.getOrDefault(userId, Lists.create()); messages.add(message); messagesForUser.put(userId, messages); } @Override public List<Message> getMessagesForUser(String userId) throws TException { checkUserId(userId); return messagesForUser.getOrDefault(userId, Lists.emptyList()); } @Override public void deleteMessageForUser(String userId, String messageId) throws TException { checkThat(userId, messageId) .throwing(InvalidArgumentException.class) .usingMessage("empty arguments") .are(nonEmptyString()); Predicate<Message> notEqualToMessageId = msg -> !Objects.equal(msg.messageId, messageId); List<Message> messages = messagesForUser.getOrDefault(userId, Lists.emptyList()); messages = messages.stream() .filter(notEqualToMessageId) .collect(Collectors.toList()); messagesForUser.put(userId, messages); } @Override public void deleteAllMessagesForUser(String userId) throws TException { checkUserId(userId); messagesForUser.remove(userId); } @Override public long countInboxForUser(String userId) throws TException { checkUserId(userId); return messagesForUser.getOrDefault(userId, Lists.emptyList()).size(); } @Override public boolean containsMessageInInbox(String userId, Message message) throws TException { checkUserId(userId); checkThat(message) .throwing(InvalidArgumentException.class) .is(validMessage()); String messageId = message.messageId; return 
this.messagesForUser.getOrDefault(userId, Lists.emptyList()) .stream() .map(Message::getMessageId) .anyMatch(id -> Objects.equal(id, messageId)); } private void checkUserId(String userId) throws InvalidArgumentException { checkThat(userId) .usingMessage("missing userId") .throwing(InvalidArgumentException.class) .is(nonEmptyString()); } }
src/main/java/tech/aroma/banana/data/memory/MemoryInboxRepository.java
/* * Copyright 2016 Aroma Tech. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.aroma.banana.data.memory; import com.google.common.base.Objects; import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import sir.wellington.alchemy.collections.lists.Lists; import sir.wellington.alchemy.collections.maps.Maps; import tech.aroma.banana.data.InboxRepository; import tech.aroma.banana.thrift.Message; import tech.aroma.banana.thrift.User; import tech.aroma.banana.thrift.exceptions.InvalidArgumentException; import tech.sirwellington.alchemy.annotations.arguments.Required; import static tech.aroma.banana.data.assertions.RequestAssertions.validMessage; import static tech.aroma.banana.data.assertions.RequestAssertions.validUser; import static tech.sirwellington.alchemy.arguments.Arguments.checkThat; import static tech.sirwellington.alchemy.arguments.assertions.StringAssertions.nonEmptyString; import static tech.sirwellington.alchemy.arguments.Arguments.checkThat; /** * * @author SirWellington */ final class MemoryInboxRepository implements InboxRepository { private final static Logger LOG = LoggerFactory.getLogger(MemoryInboxRepository.class); private final Map<String, List<Message>> messagesForUser = Maps.createSynchronized(); @Override public void saveMessageForUser(@Required User user, @Required Message message) throws TException { checkThat(message) .throwing(InvalidArgumentException.class) .is(validMessage()); checkThat(user) .throwing(InvalidArgumentException.class) .is(validUser()); String userId = user.userId; List<Message> messages = messagesForUser.getOrDefault(userId, Lists.create()); messages.add(message); messagesForUser.put(userId, messages); } @Override public List<Message> getMessagesForUser(String userId) throws TException { checkUserId(userId); return messagesForUser.getOrDefault(userId, Lists.emptyList()); } @Override public void deleteMessageForUser(String userId, String messageId) throws TException { checkThat(userId, messageId) .throwing(InvalidArgumentException.class) .usingMessage("empty arguments") .are(nonEmptyString()); Predicate<Message> notEqualToMessageId = msg -> !Objects.equal(msg.messageId, messageId); List<Message> messages = messagesForUser.getOrDefault(userId, Lists.emptyList()); messages = messages.stream() .filter(notEqualToMessageId) .collect(Collectors.toList()); messagesForUser.put(userId, messages); } @Override public void deleteAllMessagesForUser(String userId) throws TException { checkUserId(userId); messagesForUser.remove(userId); } @Override public long countInboxForUser(String userId) throws TException { checkUserId(userId); return messagesForUser.getOrDefault(userId, Lists.emptyList()).size(); } @Override public boolean containsMessageInInbox(String userId, Message message) throws TException { checkUserId(userId); checkThat(message) .throwing(InvalidArgumentException.class) .is(validMessage()); String 
messageId = message.messageId; return this.messagesForUser.getOrDefault(userId, Lists.emptyList()) .stream() .map(Message::getMessageId) .anyMatch(id -> Objects.equal(id, messageId)); } private void checkUserId(String userId) throws InvalidArgumentException { checkThat(userId) .usingMessage("missing userId") .throwing(InvalidArgumentException.class) .is(nonEmptyString()); } }
MemoryInboxRepository: import statement cleanup
src/main/java/tech/aroma/banana/data/memory/MemoryInboxRepository.java
MemoryInboxRepository: import statement cleanup
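The repository above keeps one synchronized map from userId to a message list; note that deleteMessageForUser rebuilds the list through a stream filter rather than mutating it in place. A minimal usage sketch, placed in the same package since the class is package-private; the ids are placeholders, and it is an assumption here that the validUser()/validMessage() checks accept objects with only their id fields set:

package tech.aroma.banana.data.memory;

import org.apache.thrift.TException;
import tech.aroma.banana.thrift.Message;
import tech.aroma.banana.thrift.User;

public class InboxDemo {
    public static void main(String[] args) throws TException {
        MemoryInboxRepository inbox = new MemoryInboxRepository();

        User user = new User();
        user.userId = "user-1";        // public Thrift field, as the repository itself reads it
        Message message = new Message();
        message.messageId = "msg-1";

        inbox.saveMessageForUser(user, message);
        System.out.println(inbox.countInboxForUser("user-1"));               // 1
        System.out.println(inbox.containsMessageInInbox("user-1", message)); // true
        inbox.deleteMessageForUser("user-1", "msg-1");
        System.out.println(inbox.countInboxForUser("user-1"));               // 0
    }
}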
Java
apache-2.0
319719a72bb25aeba0df60c6f0e5f4dc576b242d
0
SyncFree/SwiftCloud,SyncFree/SwiftCloud,SyncFree/SwiftCloud,SyncFree/SwiftCloud
/***************************************************************************** * Copyright 2011-2012 INRIA * Copyright 2011-2012 Universidade Nova de Lisboa * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ package swift.pubsub; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListSet; import swift.crdt.CRDTIdentifier; import swift.proto.SwiftProtocolHandler; import swift.proto.UnsubscribeUpdatesReply; import swift.proto.UnsubscribeUpdatesRequest; import swift.proto.UpdatesNotification; import sys.net.api.Endpoint; import sys.net.api.rpc.RpcEndpoint; import sys.net.api.rpc.RpcHandle; import sys.net.api.rpc.RpcHandler; import sys.pubsub.impl.AbstractPubSub; import sys.scheduler.Task; import sys.utils.FifoQueue; /** * Stub for the notification system. Currently only manages unsubscriptions... * * @author smduarte * */ public class ScoutPubSubService extends AbstractPubSub<CRDTIdentifier, CommitNotification> { final String clientId; final Endpoint surrogate; final RpcEndpoint endpoint; final Set<CRDTIdentifier> subscriptions = new ConcurrentSkipListSet<CRDTIdentifier>(); final FifoQueue<UpdatesNotification> fifoQueue; final Set<CRDTIdentifier> removals = new ConcurrentSkipListSet<CRDTIdentifier>(); final Map<Long, UnsubscribeUpdatesRequest> updates = new ConcurrentHashMap<Long, UnsubscribeUpdatesRequest>(); RpcHandler replyHandler; List<Integer> gots = new ArrayList<Integer>(); boolean bound2dc = false; public ScoutPubSubService(final String clientId, RpcEndpoint endpoint, Endpoint surrogate) { this.clientId = clientId; this.endpoint = endpoint; this.surrogate = surrogate; this.replyHandler = new SwiftProtocolHandler() { protected void onReceive(RpcHandle conn, UnsubscribeUpdatesReply ack) { bound2dc = true; // System.err.println(ack.getId()); } protected void onReceive(RpcHandle conn, UpdatesNotification request) { fifoQueue.offer(request.seqN(), request); } }; this.fifoQueue = new FifoQueue<UpdatesNotification>() { public void process(UpdatesNotification p) { gots.add(p.seqN()); // System.err.println(gots); for (CommitNotification r : p.getRecords()) { ScoutPubSubService.this.notify(r.info.keySet(), r); } } }; } public boolean isSubscribed(CRDTIdentifier id) { return subscriptions.contains(id); } @Override public void unsubscribe(CRDTIdentifier id, Handler<CRDTIdentifier, CommitNotification> handler) { if (subscriptions.remove(id)) { removals.add(id); if (!updater.isScheduled()) updater.reSchedule(0.1); } } @Override public void subscribe(CRDTIdentifier id, Handler<CRDTIdentifier, CommitNotification> handler) { subscriptions.add(id); } Task updater = new Task(3) { public void run() { if (removals.size() > 0 || !bound2dc) { UnsubscribeUpdatesRequest req = new UnsubscribeUpdatesRequest(0L, clientId, removals); endpoint.send(surrogate, req, replyHandler, 0); removals.clear(); } if (!bound2dc) 
reSchedule(1); } }; }
src-core/swift/pubsub/ScoutPubSubService.java
/***************************************************************************** * Copyright 2011-2012 INRIA * Copyright 2011-2012 Universidade Nova de Lisboa * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ package swift.pubsub; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListSet; import swift.crdt.CRDTIdentifier; import swift.proto.SwiftProtocolHandler; import swift.proto.UnsubscribeUpdatesReply; import swift.proto.UnsubscribeUpdatesRequest; import swift.proto.UpdatesNotification; import sys.net.api.Endpoint; import sys.net.api.rpc.RpcEndpoint; import sys.net.api.rpc.RpcHandle; import sys.net.api.rpc.RpcHandler; import sys.pubsub.impl.AbstractPubSub; import sys.scheduler.Task; import sys.utils.FifoQueue; /** * Stub for the notification system. Currently only manages unsubscriptions... * * @author smduarte * */ public class ScoutPubSubService extends AbstractPubSub<CRDTIdentifier, CommitNotification> { final String clientId; final Endpoint surrogate; final RpcEndpoint endpoint; final Set<CRDTIdentifier> subscriptions = new ConcurrentSkipListSet<CRDTIdentifier>(); final FifoQueue<UpdatesNotification> fifoQueue; final Set<CRDTIdentifier> removals = new ConcurrentSkipListSet<CRDTIdentifier>(); final Map<Long, UnsubscribeUpdatesRequest> updates = new ConcurrentHashMap<Long, UnsubscribeUpdatesRequest>(); RpcHandler replyHandler; List<Integer> gots = new ArrayList<Integer>(); boolean bound2dc = false; public ScoutPubSubService(final String clientId, RpcEndpoint endpoint, Endpoint surrogate) { this.clientId = clientId; this.endpoint = endpoint; this.surrogate = surrogate; this.replyHandler = new SwiftProtocolHandler() { protected void onReceive(RpcHandle conn, UnsubscribeUpdatesReply ack) { bound2dc = true; // System.err.println(ack.getId()); } protected void onReceive(RpcHandle conn, UpdatesNotification request) { fifoQueue.offer(request.seqN(), request); } }; this.fifoQueue = new FifoQueue<UpdatesNotification>() { public void process(UpdatesNotification p) { gots.add(p.seqN()); System.err.println(gots); for (CommitNotification r : p.getRecords()) { ScoutPubSubService.this.notify(r.info.keySet(), r); } } }; } public boolean isSubscribed(CRDTIdentifier id) { return subscriptions.contains(id); } @Override public void unsubscribe(CRDTIdentifier id, Handler<CRDTIdentifier, CommitNotification> handler) { if (subscriptions.remove(id)) { removals.add(id); if (!updater.isScheduled()) updater.reSchedule(0.1); } } @Override public void subscribe(CRDTIdentifier id, Handler<CRDTIdentifier, CommitNotification> handler) { subscriptions.add(id); } Task updater = new Task(0.25) { public void run() { if (removals.size() > 0 || !bound2dc) { UnsubscribeUpdatesRequest req = new UnsubscribeUpdatesRequest(0L, clientId, removals); endpoint.send(surrogate, req, replyHandler, 0); removals.clear(); } if (!bound2dc) 
reSchedule(1); } }; }
Increased delay to first attempt to bind to DC pubsub service.
src-core/swift/pubsub/ScoutPubSubService.java
Increased delay to first attempt to bind to DC pubsub service.
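ScoutPubSubService above funnels incoming UpdatesNotification messages through a FifoQueue keyed by seqN(), so subscribers observe commit notifications in sequence even if the transport delivers them out of order. A self-contained sketch of that reorder-buffer idea (this is not the sys.utils.FifoQueue implementation, whose internals are not shown in this record, and the first expected sequence number is assumed to be 0):

import java.util.HashMap;
import java.util.Map;

class ReorderBuffer<T> {
    private final Map<Integer, T> parked = new HashMap<Integer, T>();
    private int next = 0; // first expected sequence number (assumption)

    void offer(int seqN, T item) {
        parked.put(seqN, item);            // park the arrival
        while (parked.containsKey(next)) { // drain whatever is now contiguous
            process(parked.remove(next));
            next++;
        }
    }

    void process(T item) {
        System.out.println("in order: " + item);
    }
}

Offering items as (1, "b") then (0, "a") processes "a" before "b". The recorded commit itself only raises the updater Task's initial delay from 0.25 to 3 and silences a debug println, giving the scout longer before its first attempt to bind to the DC.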
Java
apache-2.0
60e4471bdbb666d521d2611180109c81233793fc
0
bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud
package com.planet_ink.coffee_mud.Abilities.Druid; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2004-2018 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Chant_Hippieness extends Chant { @Override public String ID() { return "Chant_Hippieness"; } private final static String localizedName = CMLib.lang().L("Hippieness"); @Override public String name() { return localizedName; } private final static String localizedStaticDisplay = CMLib.lang().L("(Feeling Groovy)"); @Override public String displayText() { return localizedStaticDisplay; } @Override public int classificationCode() { return Ability.ACODE_CHANT|Ability.DOMAIN_ENDURING; } @Override public int abstractQuality() { return Ability.QUALITY_OK_OTHERS; } @Override protected int canAffectCode() { return CAN_MOBS; } protected List<Pair<Clan,Integer>> oldClans=null; @Override public void affectCharStats(final MOB affected, final CharStats affectableStats) { super.affectCharStats(affected,affectableStats); affectableStats.setStat(CharStats.STAT_WISDOM,affectableStats.getStat(CharStats.STAT_WISDOM)-2); if(affectableStats.getStat(CharStats.STAT_WISDOM)<1) affectableStats.setStat(CharStats.STAT_WISDOM,1); for(final Pair<Clan,Integer> p : affected.clans()) oldClans.add(p); affected.setClan("",Integer.MIN_VALUE); // deletes all clans } @Override public boolean okMessage(final Environmental host, final CMMsg msg) { if(affected instanceof MOB) { for(final Pair<Clan,Integer> p : ((MOB)affected).clans()) oldClans.add(p); ((MOB)affected).setClan("",Integer.MIN_VALUE); // deletes all clans } if((msg.source()==affected) &&(msg.tool() instanceof Ability) &&(!msg.tool().ID().equals("FoodPrep")) &&(!msg.tool().ID().equals("Cooking")) &&(((((Ability)msg.tool()).classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_CRAFTINGSKILL) ||((((Ability)msg.tool()).classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_BUILDINGSKILL) ||((((Ability)msg.tool()).classificationCode()&Ability.ALL_ACODES)==Ability.ACODE_COMMON_SKILL)) &&(msg.sourceMinor()!=CMMsg.TYP_TEACH)) { msg.source().tell(L("No, man... 
work is so bourgeois...")); return false; } return super.okMessage(host,msg); } @Override public boolean tick(final Tickable ticking, final int tickID) { if(!super.tick(ticking,tickID)) return false; if(affected instanceof MOB) { final MOB mob=(MOB)affected; for(final Pair<Clan,Integer> p : mob.clans()) oldClans.add(p); mob.setClan("",Integer.MIN_VALUE); // deletes all clans final boolean mouthed=mob.fetchFirstWornItem(Wearable.WORN_MOUTH)!=null; final Room R=mob.location(); if((!mouthed)&&(R!=null)&&(R.numItems()>0)) { final Item I=R.getRandomItem(); if((I!=null)&&(I.fitsOn(Wearable.WORN_MOUTH))) CMLib.commands().postGet(mob,I.container(),I,false); } Ability A=mob.fetchEffect("Fighter_Bezerk"); if(A!=null) A.unInvoke(); A=mob.fetchEffect("Song_Rage"); if(A!=null) A.unInvoke(); if(mob.numItems()>0) { final Item I=mob.getRandomItem(); if(mouthed) { if((I!=null)&&(!I.amWearingAt(Wearable.IN_INVENTORY))&&(!I.amWearingAt(Wearable.WORN_MOUTH))) CMLib.commands().postRemove(mob,I,false); } else if((I!=null)&&(I instanceof Light)&&(I.fitsOn(Wearable.WORN_MOUTH))) { if((I instanceof Container) &&(((Container)I).containTypes()==Container.CONTAIN_SMOKEABLES) &&(!((Container)I).hasContent())) { final Item smoke=CMClass.getItem("GenResource"); if(smoke!=null) { smoke.setName(L("some smoke")); smoke.setDescription(L("Looks liefy and green.")); smoke.setDisplayText(L("some smoke is sitting here.")); smoke.setMaterial(RawMaterial.RESOURCE_HEMP); smoke.basePhyStats().setWeight(1); smoke.setBaseValue(25); smoke.recoverPhyStats(); smoke.text(); mob.addItem(smoke); smoke.setContainer((Container)I); } } mob.doCommand(CMParms.parse("WEAR \""+I.Name()+"\""),MUDCmdProcessor.METAFLAG_FORCED); } else if((I!=null)&&(!I.amWearingAt(Wearable.IN_INVENTORY))&&(!I.amWearingAt(Wearable.WORN_MOUTH))) CMLib.commands().postRemove(mob,I,false); } } return true; } @Override public void unInvoke() { // undo the affects of this spell if(!(affected instanceof MOB)) return; final MOB mob=(MOB)affected; super.unInvoke(); if(canBeUninvoked()) { for(final Pair<Clan,Integer> p : oldClans) mob.setClan(p.first.clanID(),p.second.intValue()); mob.tell(L("You don't feel quite so groovy.")); } } @Override public int castingQuality(MOB mob, Physical target) { if(mob!=null) { if(mob.isInCombat()) return Ability.QUALITY_INDIFFERENT; if(target instanceof MOB) { if(CMLib.flags().isAnimalIntelligence((MOB)target)) return Ability.QUALITY_INDIFFERENT; } } return super.castingQuality(mob,target); } @Override public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel) { final MOB target=getTarget(mob,commands,givenTarget); if(target==null) return false; if(CMLib.flags().isAnimalIntelligence(target)) { mob.tell(L("@x1 is not smart enough to be a hippy.",target.name(mob))); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { invoker=mob; final CMMsg msg=CMClass.getMsg(mob,target,this,(target.isMonster()?0:CMMsg.MASK_MALICIOUS)|verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to <T-NAMESELF>!^?")); final CMMsg msg2=CMClass.getMsg(mob,target,this,(target.isMonster()?0:CMMsg.MASK_MALICIOUS)|CMMsg.MSK_CAST_VERBAL|CMMsg.TYP_DISEASE|(auto?CMMsg.MASK_ALWAYS:0),null); if((mob.location().okMessage(mob,msg))&&(mob.location().okMessage(mob,msg2))) { mob.location().send(mob,msg); mob.location().send(mob,msg2); if((msg.value()<=0)&&(msg2.value()<=0)) { oldClans=new LinkedList<Pair<Clan,Integer>>(); for(final 
Pair<Clan,Integer> p : target.clans()) oldClans.add(p); target.setClan("",Integer.MIN_VALUE); // deletes all clans CMLib.commands().postSay(target,null,L("Far out..."),false,false); maliciousAffect(mob,target,asLevel,0,verbalCastMask(mob,target,auto)|CMMsg.TYP_MIND); } } } else return beneficialWordsFizzle(mob,target,L("<S-NAME> chant(s) to <T-NAMESELF>, but nothing more happens.")); // return whether it worked return success; } }
com/planet_ink/coffee_mud/Abilities/Druid/Chant_Hippieness.java
package com.planet_ink.coffee_mud.Abilities.Druid; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2004-2018 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Chant_Hippieness extends Chant { @Override public String ID() { return "Chant_Hippieness"; } private final static String localizedName = CMLib.lang().L("Hippieness"); @Override public String name() { return localizedName; } private final static String localizedStaticDisplay = CMLib.lang().L("(Feeling Groovy)"); @Override public String displayText() { return localizedStaticDisplay; } @Override public int classificationCode() { return Ability.ACODE_CHANT|Ability.DOMAIN_ENDURING; } @Override public int abstractQuality() { return Ability.QUALITY_BENEFICIAL_SELF; } @Override protected int canAffectCode() { return CAN_MOBS; } protected List<Pair<Clan,Integer>> oldClans=null; @Override public void affectCharStats(final MOB affected, final CharStats affectableStats) { super.affectCharStats(affected,affectableStats); affectableStats.setStat(CharStats.STAT_WISDOM,affectableStats.getStat(CharStats.STAT_WISDOM)-2); if(affectableStats.getStat(CharStats.STAT_WISDOM)<1) affectableStats.setStat(CharStats.STAT_WISDOM,1); for(final Pair<Clan,Integer> p : affected.clans()) oldClans.add(p); affected.setClan("",Integer.MIN_VALUE); // deletes all clans } @Override public boolean okMessage(final Environmental host, final CMMsg msg) { if(affected instanceof MOB) { for(final Pair<Clan,Integer> p : ((MOB)affected).clans()) oldClans.add(p); ((MOB)affected).setClan("",Integer.MIN_VALUE); // deletes all clans } if((msg.source()==affected) &&(msg.tool() instanceof Ability) &&(!msg.tool().ID().equals("FoodPrep")) &&(!msg.tool().ID().equals("Cooking")) &&(((((Ability)msg.tool()).classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_CRAFTINGSKILL) ||((((Ability)msg.tool()).classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_BUILDINGSKILL) ||((((Ability)msg.tool()).classificationCode()&Ability.ALL_ACODES)==Ability.ACODE_COMMON_SKILL)) &&(msg.sourceMinor()!=CMMsg.TYP_TEACH)) { msg.source().tell(L("No, man... 
work is so bourgeois...")); return false; } return super.okMessage(host,msg); } @Override public boolean tick(final Tickable ticking, final int tickID) { if(!super.tick(ticking,tickID)) return false; if(affected instanceof MOB) { final MOB mob=(MOB)affected; for(final Pair<Clan,Integer> p : mob.clans()) oldClans.add(p); mob.setClan("",Integer.MIN_VALUE); // deletes all clans final boolean mouthed=mob.fetchFirstWornItem(Wearable.WORN_MOUTH)!=null; final Room R=mob.location(); if((!mouthed)&&(R!=null)&&(R.numItems()>0)) { final Item I=R.getRandomItem(); if((I!=null)&&(I.fitsOn(Wearable.WORN_MOUTH))) CMLib.commands().postGet(mob,I.container(),I,false); } Ability A=mob.fetchEffect("Fighter_Bezerk"); if(A!=null) A.unInvoke(); A=mob.fetchEffect("Song_Rage"); if(A!=null) A.unInvoke(); if(mob.numItems()>0) { final Item I=mob.getRandomItem(); if(mouthed) { if((I!=null)&&(!I.amWearingAt(Wearable.IN_INVENTORY))&&(!I.amWearingAt(Wearable.WORN_MOUTH))) CMLib.commands().postRemove(mob,I,false); } else if((I!=null)&&(I instanceof Light)&&(I.fitsOn(Wearable.WORN_MOUTH))) { if((I instanceof Container) &&(((Container)I).containTypes()==Container.CONTAIN_SMOKEABLES) &&(!((Container)I).hasContent())) { final Item smoke=CMClass.getItem("GenResource"); if(smoke!=null) { smoke.setName(L("some smoke")); smoke.setDescription(L("Looks liefy and green.")); smoke.setDisplayText(L("some smoke is sitting here.")); smoke.setMaterial(RawMaterial.RESOURCE_HEMP); smoke.basePhyStats().setWeight(1); smoke.setBaseValue(25); smoke.recoverPhyStats(); smoke.text(); mob.addItem(smoke); smoke.setContainer((Container)I); } } mob.doCommand(CMParms.parse("WEAR \""+I.Name()+"\""),MUDCmdProcessor.METAFLAG_FORCED); } else if((I!=null)&&(!I.amWearingAt(Wearable.IN_INVENTORY))&&(!I.amWearingAt(Wearable.WORN_MOUTH))) CMLib.commands().postRemove(mob,I,false); } } return true; } @Override public void unInvoke() { // undo the affects of this spell if(!(affected instanceof MOB)) return; final MOB mob=(MOB)affected; super.unInvoke(); if(canBeUninvoked()) { for(final Pair<Clan,Integer> p : oldClans) mob.setClan(p.first.clanID(),p.second.intValue()); mob.tell(L("You don't feel quite so groovy.")); } } @Override public int castingQuality(MOB mob, Physical target) { if(mob!=null) { if(mob.isInCombat()) return Ability.QUALITY_INDIFFERENT; if(target instanceof MOB) { if(CMLib.flags().isAnimalIntelligence((MOB)target)) return Ability.QUALITY_INDIFFERENT; } } return super.castingQuality(mob,target); } @Override public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel) { final MOB target=getTarget(mob,commands,givenTarget); if(target==null) return false; if(CMLib.flags().isAnimalIntelligence(target)) { mob.tell(L("@x1 is not smart enough to be a hippy.",target.name(mob))); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { invoker=mob; final CMMsg msg=CMClass.getMsg(mob,target,this,(target.isMonster()?0:CMMsg.MASK_MALICIOUS)|verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to <T-NAMESELF>!^?")); final CMMsg msg2=CMClass.getMsg(mob,target,this,(target.isMonster()?0:CMMsg.MASK_MALICIOUS)|CMMsg.MSK_CAST_VERBAL|CMMsg.TYP_DISEASE|(auto?CMMsg.MASK_ALWAYS:0),null); if((mob.location().okMessage(mob,msg))&&(mob.location().okMessage(mob,msg2))) { mob.location().send(mob,msg); mob.location().send(mob,msg2); if((msg.value()<=0)&&(msg2.value()<=0)) { oldClans=new LinkedList<Pair<Clan,Integer>>(); for(final 
Pair<Clan,Integer> p : target.clans()) oldClans.add(p); target.setClan("",Integer.MIN_VALUE); // deletes all clans CMLib.commands().postSay(target,null,L("Far out..."),false,false); maliciousAffect(mob,target,asLevel,0,verbalCastMask(mob,target,auto)|CMMsg.TYP_MIND); } } } else return beneficialWordsFizzle(mob,target,L("<S-NAME> chant(s) to <T-NAMESELF>, but nothing more happens.")); // return whether it worked return success; } }
hippiness is other directed
com/planet_ink/coffee_mud/Abilities/Druid/Chant_Hippieness.java
hippiness is other directed
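The Chant_Hippieness record above turns on one idiom: snapshot a piece of character state (the clan roster) when the effect lands, then restore it verbatim in unInvoke(). Below is a minimal, self-contained sketch of that save-and-restore pattern; the Mob and GroovyEffect types are hypothetical stand-ins for the CoffeeMud MOB and Ability classes, not CoffeeMud API.

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical stand-in for a MOB and its clan roster (clanID -> role).
class Mob {
    final Map<String, Integer> clans = new LinkedHashMap<>();
}

class GroovyEffect {
    // Snapshot taken when the effect lands; stays null until then.
    private Map<String, Integer> oldClans = null;

    void invoke(Mob target) {
        oldClans = new LinkedHashMap<>(target.clans); // copy, not a live view
        target.clans.clear();                         // "deletes all clans"
    }

    void unInvoke(Mob target) {
        if (oldClans == null)  // effect restored from a save, never invoked
            return;
        target.clans.putAll(oldClans); // restore recorded memberships
        oldClans = null;
    }
}

public class SnapshotRestoreDemo {
    public static void main(String[] args) {
        Mob hippy = new Mob();
        hippy.clans.put("GreatClan", 5);
        GroovyEffect fx = new GroovyEffect();
        fx.invoke(hippy);
        System.out.println("during: " + hippy.clans); // {}
        fx.unInvoke(hippy);
        System.out.println("after: " + hippy.clans);  // {GreatClan=5}
    }
}

Copying the map (rather than holding a view of it) matters: restoring from a live view of a cleared collection would yield nothing, which is the same class of bug the null guards in the chant protect against.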
Java
apache-2.0
34fc2e33677028b00c684bc12c795b218918fab9
0
vosskaem/jasperstarter,vosskaem/jasperstarter,vosskaem/jasperstarter
/* * Copyright 2012 Cenote GmbH. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.cenote.jasperstarter; import de.cenote.jasperstarter.types.Command; import de.cenote.jasperstarter.types.DbType; import de.cenote.jasperstarter.types.Dest; import de.cenote.jasperstarter.types.OutputFormat; import de.cenote.tools.classpath.ApplicationClasspath; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; import javax.print.PrintService; import javax.print.PrintServiceLookup; import net.sf.jasperreports.engine.JRException; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.ArgumentGroup; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; import net.sourceforge.argparse4j.inf.Subparsers; import org.apache.commons.io.IOCase; import org.apache.commons.io.filefilter.WildcardFileFilter; import org.apache.commons.lang.LocaleUtils; /** * * @author Volker Voßkämper <vvo at cenote.de> * @version $Revision: 349bcea5768c:59 branch:default $ */ public class App { private static App instance = null; private Properties applicationProperties = null; private Namespace namespace = null; private Map<String, Argument> allArguments = null; private App() { this.applicationProperties = new Properties(); try { this.applicationProperties.load(this.getClass(). 
getResourceAsStream("/de/cenote/jasperstarter/application.properties")); } catch (IOException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } } public static App getInstance() { if (App.instance == null) { App.instance = new App(); } return App.instance; } /** * @param args the command line arguments */ public static void main(String[] args) { App app = App.getInstance(); // create the command line parser ArgumentParser parser = app.createArgumentParser(); if (args.length == 0) { System.out.println(parser.formatUsage()); System.out.println("type: jasperstarter -h to get help"); System.exit(0); } app.namespace = app.parseArgumentParser(args, parser); // setting locale if given if (app.namespace.get(Dest.LOCALE) != null) { Locale.setDefault(new Locale((String) app.namespace.get(Dest.LOCALE))); } switch (Command.getCommand(app.namespace.getString(Dest.COMMAND))) { case COMPILE: case CP: app.compile(); break; case PROCESS: case PR: app.processReport(); break; case LIST_PRINTERS: case LP: app.listPrinters(); break; } } private void compile() { boolean error = false; App app = App.getInstance(); File input = new File(app.namespace.getString(Dest.INPUT)); if (input.isFile()) { try { Report report = new Report(input); report.compileToFile(); } catch (IllegalArgumentException ex) { System.err.println(ex.getMessage()); error = true; } } else if (input.isDirectory()) { // compile all .jrxml files in this directory FileFilter fileFilter = new WildcardFileFilter("*.jrxml", IOCase.INSENSITIVE); File[] files = input.listFiles(fileFilter); for (File file : files) { try { System.out.println("Compiling: \"" + file + "\""); Report report = new Report(file); report.compileToFile(); } catch (IllegalArgumentException ex) { System.err.println(ex.getMessage()); error = true; } } } else { System.err.println("Error: not a file: " + input.getName()); error = true; } if (error) { System.exit(1); } else { System.exit(0); } } private void processReport() { App app = App.getInstance(); // add the jdbc dir to classpath try { if (app.namespace.get(Dest.JDBC_DIR) != null) { File jdbcDir = new File(app.namespace.get(Dest.JDBC_DIR).toString()); if (app.namespace.getBoolean(Dest.DEBUG)) { System.out.println("Using jdbc-dir: " + jdbcDir.getAbsolutePath()); } ApplicationClasspath.addJars(jdbcDir.getAbsolutePath()); } else { ApplicationClasspath.addJarsRelative("../jdbc"); } } catch (IOException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } catch (URISyntaxException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } Report report = null; try { report = new Report(new File(app.namespace.getString(Dest.INPUT)).getAbsoluteFile()); } catch (IllegalArgumentException ex) { System.err.println(ex.getMessage()); System.exit(1); } report.fill(); // produces visible output file if OutputFormat.jrprint is set List<OutputFormat> formats = app.namespace.getList(Dest.OUTPUT_FORMATS); Boolean viewIt = false; Boolean printIt = false; try { for (OutputFormat f : formats) { // OutputFormat.jrprint is handled in fill() if (OutputFormat.print.equals(f)) { printIt = true; } else if (OutputFormat.view.equals(f)) { viewIt = true; } else if (OutputFormat.pdf.equals(f)) { report.exportPdf(); } else if (OutputFormat.docx.equals(f)) { report.exportDocx(); } else if (OutputFormat.odt.equals(f)) { report.exportOdt(); } else if (OutputFormat.rtf.equals(f)) { report.exportRtf(); } else if (OutputFormat.html.equals(f)) { 
report.exportHtml(); } else if (OutputFormat.xml.equals(f)) { report.exportXml(); } else if (OutputFormat.xls.equals(f)) { report.exportXls(); } else if (OutputFormat.xlsx.equals(f)) { report.exportXlsx(); } else if (OutputFormat.csv.equals(f)) { report.exportCsv(); } else if (OutputFormat.ods.equals(f)) { report.exportOds(); } else if (OutputFormat.pptx.equals(f)) { report.exportPptx(); } else if (OutputFormat.xhtml.equals(f)) { report.exportXhtml(); } } if (viewIt) { report.view(); } else if (printIt) { // print directly only if viewer is not activated report.print(); } } catch (JRException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } } private void listPrinters() { PrintService defaultService = PrintServiceLookup.lookupDefaultPrintService(); System.out.println("Default printer:"); System.out.println("-----------------"); System.out.println((defaultService == null) ? "--- not set ---" : defaultService.getName()); System.out.println(""); PrintService[] services = PrintServiceLookup.lookupPrintServices(null, null); System.out.println("Available printers:"); System.out.println("--------------------"); for (PrintService service : services) { System.out.println(service.getName()); } } private Properties getApplicationProperties() { return this.applicationProperties; } private ArgumentParser createArgumentParser() { this.allArguments = new HashMap<String, Argument>(); String jasperversion = Package.getPackage("net.sf.jasperreports.engine"). getImplementationVersion(); StringBuffer sb = new StringBuffer("JasperStarter ") .append(applicationProperties.getProperty("application.version")) .append(" Rev ").append(applicationProperties.getProperty("application.revision")) .append(" ").append(applicationProperties.getProperty("application.revision.date")) .append("\n").append(" - JasperReports: ").append(jasperversion); ArgumentParser parser = ArgumentParsers.newArgumentParser("jasperstarter", false, "-", "@") .version(sb.toString()); //ArgumentGroup groupOptions = parser.addArgumentGroup("options"); parser.addArgument("-h", "--help").action(Arguments.help()).help("show this help message and exit"); parser.addArgument("--locale").dest(Dest.LOCALE).metavar("<lang>").help("set locale with two-letter ISO-639 code"); parser.addArgument("-v", "--verbose").dest(Dest.DEBUG).action(Arguments.storeTrue()).help("display additional messages"); parser.addArgument("-V", "--version").action(Arguments.version()).help("display version information and exit"); Subparsers subparsers = parser.addSubparsers().title("commands"). help("type <cmd> -h to get help on command").metavar("<cmd>"). dest(Dest.COMMAND); Subparser parserCompile = subparsers.addParser("cp", true).help("compile - compile reports"); createCompileArguments(parserCompile); Subparser parserProcess = subparsers.addParser("pr", true).help("process - view, print or export an existing report"); createProcessArguments(parserProcess); // @todo: creating aliases does not work for now because of the ambiguous allArguments elements !! // This does NOT work: //Subparser parserProc = subparsers.addParser("proc", true).help("alias for command process"); //createProcessArguments(parserProc); Subparser parserListPrinters = subparsers.addParser("lp", true). 
help("list printers - lists available printers on this system"); return parser; } private void createCompileArguments(Subparser parser) { ArgumentGroup groupOptions = parser.addArgumentGroup("options"); groupOptions.addArgument("-i").metavar("<file>").dest(Dest.INPUT).required(true).help("input file (.jrxml) or directory"); groupOptions.addArgument("-o").metavar("<file>").dest(Dest.OUTPUT).help("directory or basename of outputfile(s)"); } private void createProcessArguments(Subparser parser) { ArgumentGroup groupOptions = parser.addArgumentGroup("options"); groupOptions.addArgument("-f").metavar("<fmt>").dest(Dest.OUTPUT_FORMATS). required(true).nargs("+").type(Arguments.enumType(OutputFormat.class)). help("view, print, pdf, rtf, xls, xlsx, docx, odt, ods, pptx, csv, html, xhtml, xml, jrprint"); groupOptions.addArgument("-i").metavar("<file>").dest(Dest.INPUT).required(true).help("input file (.jrxml|.jasper|.jrprint)"); groupOptions.addArgument("-o").metavar("<file>").dest(Dest.OUTPUT).help("directory or basename of outputfile(s)"); //groupOptions.addArgument("-h", "--help").action(Arguments.help()).help("show this help message and exit"); ArgumentGroup groupCompileOptions = parser.addArgumentGroup("compile options"); groupCompileOptions.addArgument("-w", "--write-jasper"). dest(Dest.WRITE_JASPER).action(Arguments.storeTrue()).help("write .jasper file to imput dir if jrxml is prcessed"); ArgumentGroup groupFillOptions = parser.addArgumentGroup("fill options"); groupFillOptions.addArgument("-P").metavar("<p>").dest(Dest.PARAMS) .nargs("+").help( "report parameter: name=type:value [...] | types: string, int, double, date, image, locale"); groupFillOptions.addArgument("-k", "--keep").dest(Dest.KEEP).action(Arguments.storeTrue()). help("don't delete the temporary .jrprint file. OBSOLETE use output format jrprint"); ArgumentGroup groupDbOptions = parser.addArgumentGroup("db options"); groupDbOptions.addArgument("-t").metavar("<dbtype>").dest(Dest.DB_TYPE). required(false).type(Arguments.enumType(DbType.class)).setDefault(DbType.none). help("database type: none, mysql, postgres, oracle, generic"); Argument argDbHost = groupDbOptions.addArgument("-H").metavar("<dbhost>").dest(Dest.DB_HOST).help("database host"); Argument argDbUser = groupDbOptions.addArgument("-u").metavar("<dbuser>").dest(Dest.DB_USER).help("database user"); Argument argDbPasswd = groupDbOptions.addArgument("-p").metavar("<dbpasswd>").dest(Dest.DB_PASSWD).setDefault("").help("database password"); Argument argDbName = groupDbOptions.addArgument("-n").metavar("<dbname>").dest(Dest.DB_NAME).help("database name"); Argument argDbSid = groupDbOptions.addArgument("--db-sid").metavar("<sid>").dest(Dest.DB_SID).help("oracle sid"); Argument argDbPort = groupDbOptions.addArgument("--db-port").metavar("<port>").dest(Dest.DB_PORT).type(Integer.class).help("database port"); Argument argDbDriver = groupDbOptions.addArgument("--db-driver").metavar("<name>").dest(Dest.DB_DRIVER).help("jdbc driver class name for use with type: generic"); Argument argDbUrl = groupDbOptions.addArgument("--db-url").metavar("<jdbcUrl>").dest(Dest.DB_URL).help("jdbc url without user, passwd with type:generic"); groupDbOptions.addArgument("--jdbc-dir").metavar("<dir>").dest(Dest.JDBC_DIR).type(File.class).help("directory where jdbc driver jars are located. 
Defaults to ./jdbc"); ArgumentGroup groupPrintOptions = parser.addArgumentGroup("print options"); groupPrintOptions.addArgument("-N").metavar("<printername>").dest(Dest.PRINTER_NAME).help("name of printer"); groupPrintOptions.addArgument("-d").dest(Dest.WITH_PRINT_DIALOG).action(Arguments.storeTrue()).help("show print dialog when printing"); groupPrintOptions.addArgument("-s").metavar("<reportname>").dest(Dest.REPORT_NAME).help("set internal report/document name when printing"); allArguments.put(argDbHost.getDest(), argDbHost); allArguments.put(argDbUser.getDest(), argDbUser); allArguments.put(argDbPasswd.getDest(), argDbPasswd); allArguments.put(argDbName.getDest(), argDbName); allArguments.put(argDbSid.getDest(), argDbSid); allArguments.put(argDbPort.getDest(), argDbPort); allArguments.put(argDbDriver.getDest(), argDbDriver); allArguments.put(argDbUrl.getDest(), argDbUrl); } private Namespace parseArgumentParser(String[] args, ArgumentParser parser) { Namespace ns = null; try { ns = parser.parseArgs(args); // change some arguments to required depending on db-type if (ns.get(Dest.DB_TYPE) != null) { if (ns.get(Dest.DB_TYPE).equals(DbType.none)) { /* no extra arguments required */ } else if (ns.get(Dest.DB_TYPE).equals(DbType.mysql)) { allArguments.get(Dest.DB_HOST).required(true); allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_NAME).required(true); allArguments.get(Dest.DB_PORT).setDefault(DbType.mysql.getPort()); } else if (ns.get(Dest.DB_TYPE).equals(DbType.postgres)) { allArguments.get(Dest.DB_HOST).required(true); allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_NAME).required(true); allArguments.get(Dest.DB_PORT).setDefault(DbType.postgres.getPort()); } else if (ns.get(Dest.DB_TYPE).equals(DbType.oracle)) { allArguments.get(Dest.DB_HOST).required(true); allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_PASSWD).required(true); allArguments.get(Dest.DB_SID).required(true); allArguments.get(Dest.DB_PORT).setDefault(DbType.oracle.getPort()); } else if (ns.get(Dest.DB_TYPE).equals(DbType.generic)) { allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_DRIVER).required(true); allArguments.get(Dest.DB_URL).required(true); } } // parse again so changed arguments become effective ns = parser.parseArgs(args); } catch (ArgumentParserException ex) { parser.handleError(ex); System.exit(1); } if (ns.getBoolean(Dest.DEBUG)) { System.out.print("Command line:"); for (String arg : args) { System.out.print(" " + arg); } System.out.print("\n"); System.out.println(ns); } return ns; } /** * @return the namespace */ public Namespace getNamespace() { return namespace; } }
src/main/java/de/cenote/jasperstarter/App.java
/* * Copyright 2012 Cenote GmbH. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.cenote.jasperstarter; import de.cenote.jasperstarter.types.Command; import de.cenote.jasperstarter.types.DbType; import de.cenote.jasperstarter.types.Dest; import de.cenote.jasperstarter.types.OutputFormat; import de.cenote.tools.classpath.ApplicationClasspath; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.logging.Level; import java.util.logging.Logger; import javax.print.PrintService; import javax.print.PrintServiceLookup; import net.sf.jasperreports.engine.JRException; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.ArgumentGroup; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; import net.sourceforge.argparse4j.inf.Subparsers; import org.apache.commons.io.IOCase; import org.apache.commons.io.filefilter.WildcardFileFilter; /** * * @author Volker Voßkämper <vvo at cenote.de> * @version $Revision: 349bcea5768c:59 branch:default $ */ public class App { private static App instance = null; private Properties applicationProperties = null; private Namespace namespace = null; private Map<String, Argument> allArguments = null; private App() { this.applicationProperties = new Properties(); try { this.applicationProperties.load(this.getClass(). 
getResourceAsStream("/de/cenote/jasperstarter/application.properties")); } catch (IOException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } } public static App getInstance() { if (App.instance == null) { App.instance = new App(); } return App.instance; } /** * @param args the command line arguments */ public static void main(String[] args) { App app = App.getInstance(); // create the command line parser ArgumentParser parser = app.createArgumentParser(); if (args.length == 0) { System.out.println(parser.formatUsage()); System.out.println("type: jasperstarter -h to get help"); System.exit(0); } app.namespace = app.parseArgumentParser(args, parser); // setting locale if given if (app.namespace.get(Dest.LOCALE) != null) { Locale.setDefault(new Locale((String) app.namespace.get(Dest.LOCALE))); } switch (Command.getCommand(app.namespace.getString(Dest.COMMAND))) { case COMPILE: case CP: app.compile(); break; case PROCESS: case PR: app.processReport(); break; case LIST_PRINTERS: case LP: app.listPrinters(); break; } } private void compile() { boolean error = false; App app = App.getInstance(); File input = new File(app.namespace.getString(Dest.INPUT)); if (input.isFile()) { try { Report report = new Report(input); report.compileToFile(); } catch (IllegalArgumentException ex) { System.err.println(ex.getMessage()); error = true; } } else if (input.isDirectory()) { // compile all .jrxml files in this directory FileFilter fileFilter = new WildcardFileFilter("*.jrxml", IOCase.INSENSITIVE); File[] files = input.listFiles(fileFilter); for (File file : files) { try { System.out.println("Compiling: \"" + file + "\""); Report report = new Report(file); report.compileToFile(); } catch (IllegalArgumentException ex) { System.err.println(ex.getMessage()); error = true; } } } else { System.err.println("Error: not a file: " + input.getName()); error = true; } if (error) { System.exit(1); } else { System.exit(0); } } private void processReport() { App app = App.getInstance(); // add the jdbc dir to classpath try { if (app.namespace.get(Dest.JDBC_DIR) != null) { File jdbcDir = new File(app.namespace.get(Dest.JDBC_DIR).toString()); if (app.namespace.getBoolean(Dest.DEBUG)) { System.out.println("Using jdbc-dir: " + jdbcDir.getAbsolutePath()); } ApplicationClasspath.addJars(jdbcDir.getAbsolutePath()); } else { ApplicationClasspath.addJarsRelative("../jdbc"); } } catch (IOException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } catch (URISyntaxException ex) { Logger.getLogger(App.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } Report report = null; try { report = new Report(new File(app.namespace.getString(Dest.INPUT)).getAbsoluteFile()); } catch (IllegalArgumentException ex) { System.err.println(ex.getMessage()); System.exit(1); } report.fill(); // produces visible output file if OutputFormat.jrprint is set List<OutputFormat> formats = app.namespace.getList(Dest.OUTPUT_FORMATS); Boolean viewIt = false; Boolean printIt = false; try { for (OutputFormat f : formats) { // OutputFormat.jrprint is handled in fill() if (OutputFormat.print.equals(f)) { printIt = true; } else if (OutputFormat.view.equals(f)) { viewIt = true; } else if (OutputFormat.pdf.equals(f)) { report.exportPdf(); } else if (OutputFormat.docx.equals(f)) { report.exportDocx(); } else if (OutputFormat.odt.equals(f)) { report.exportOdt(); } else if (OutputFormat.rtf.equals(f)) { report.exportRtf(); } else if (OutputFormat.html.equals(f)) { 
report.exportHtml(); } else if (OutputFormat.xml.equals(f)) { report.exportXml(); } else if (OutputFormat.xls.equals(f)) { report.exportXls(); } else if (OutputFormat.xlsx.equals(f)) { report.exportXlsx(); } else if (OutputFormat.csv.equals(f)) { report.exportCsv(); } else if (OutputFormat.ods.equals(f)) { report.exportOds(); } else if (OutputFormat.pptx.equals(f)) { report.exportPptx(); } else if (OutputFormat.xhtml.equals(f)) { report.exportXhtml(); } } if (viewIt) { report.view(); } else if (printIt) { // print directly only if viewer is not activated report.print(); } } catch (JRException ex) { Logger.getLogger(Db.class.getName()).log(Level.SEVERE, null, ex); System.exit(1); } } private void listPrinters() { PrintService defaultService = PrintServiceLookup.lookupDefaultPrintService(); System.out.println("Default printer:"); System.out.println("-----------------"); System.out.println((defaultService == null) ? "--- not set ---" : defaultService.getName()); System.out.println(""); PrintService[] services = PrintServiceLookup.lookupPrintServices(null, null); System.out.println("Available printers:"); System.out.println("--------------------"); for (PrintService service : services) { System.out.println(service.getName()); } } private Properties getApplicationProperties() { return this.applicationProperties; } private ArgumentParser createArgumentParser() { this.allArguments = new HashMap<String, Argument>(); String jasperversion = Package.getPackage("net.sf.jasperreports.engine"). getImplementationVersion(); StringBuffer sb = new StringBuffer("JasperStarter ") .append(applicationProperties.getProperty("application.version")) .append(" Rev ").append(applicationProperties.getProperty("application.revision")) .append(" ").append(applicationProperties.getProperty("application.revision.date")) .append("\n").append(" - JasperReports: ").append(jasperversion); ArgumentParser parser = ArgumentParsers.newArgumentParser("jasperstarter", false, "-", "@") .version(sb.toString()); //ArgumentGroup groupOptions = parser.addArgumentGroup("options"); parser.addArgument("-h", "--help").action(Arguments.help()).help("show this help message and exit"); parser.addArgument("--locale").dest(Dest.LOCALE).metavar("<lang>").help("set locale with two-letter ISO-639 code"); parser.addArgument("-v", "--verbose").dest(Dest.DEBUG).action(Arguments.storeTrue()).help("display additional messages"); parser.addArgument("-V", "--version").action(Arguments.version()).help("display version information and exit"); Subparsers subparsers = parser.addSubparsers().title("commands"). help("type <cmd> -h to get help on command").metavar("<cmd>"). dest(Dest.COMMAND); Subparser parserCompile = subparsers.addParser("cp", true).help("compile - compile reports"); createCompileArguments(parserCompile); Subparser parserProcess = subparsers.addParser("pr", true).help("process - view, print or export an existing report"); createProcessArguments(parserProcess); // @todo: creating aliases does not work for now because of the ambigoius allArguments elements !! // This does NOT work: //Subparser parserProc = subparsers.addParser("proc", true).help("alias for command process"); //createProcessArguments(parserProc); Subparser parserListPrinters = subparsers.addParser("lp", true). 
help("list printers - lists available printers on this system"); return parser; } private void createCompileArguments(Subparser parser) { ArgumentGroup groupOptions = parser.addArgumentGroup("options"); groupOptions.addArgument("-i").metavar("<file>").dest(Dest.INPUT).required(true).help("input file (.jrxml) or directory"); groupOptions.addArgument("-o").metavar("<file>").dest(Dest.OUTPUT).help("directory or basename of outputfile(s)"); } private void createProcessArguments(Subparser parser) { ArgumentGroup groupOptions = parser.addArgumentGroup("options"); groupOptions.addArgument("-f").metavar("<fmt>").dest(Dest.OUTPUT_FORMATS). required(true).nargs("+").type(Arguments.enumType(OutputFormat.class)). help("view, print, pdf, rtf, xls, xlsx, docx, odt, ods, pptx, csv, html, xhtml, xml, jrprint"); groupOptions.addArgument("-i").metavar("<file>").dest(Dest.INPUT).required(true).help("input file (.jrxml|.jasper|.jrprint)"); groupOptions.addArgument("-o").metavar("<file>").dest(Dest.OUTPUT).help("directory or basename of outputfile(s)"); //groupOptions.addArgument("-h", "--help").action(Arguments.help()).help("show this help message and exit"); ArgumentGroup groupCompileOptions = parser.addArgumentGroup("compile options"); groupCompileOptions.addArgument("-w", "--write-jasper"). dest(Dest.WRITE_JASPER).action(Arguments.storeTrue()).help("write .jasper file to imput dir if jrxml is prcessed"); ArgumentGroup groupFillOptions = parser.addArgumentGroup("fill options"); groupFillOptions.addArgument("-P").metavar("<p>").dest(Dest.PARAMS).nargs("+").help("report parameter: name=type:value [...] | types: string, int, double, date, image"); groupFillOptions.addArgument("-k", "--keep").dest(Dest.KEEP).action(Arguments.storeTrue()). help("don't delete the temporary .jrprint file. OBSOLETE use output format jrprint"); ArgumentGroup groupDbOptions = parser.addArgumentGroup("db options"); groupDbOptions.addArgument("-t").metavar("<dbtype>").dest(Dest.DB_TYPE). required(false).type(Arguments.enumType(DbType.class)).setDefault(DbType.none). help("database type: none, mysql, postgres, oracle, generic"); Argument argDbHost = groupDbOptions.addArgument("-H").metavar("<dbhost>").dest(Dest.DB_HOST).help("database host"); Argument argDbUser = groupDbOptions.addArgument("-u").metavar("<dbuser>").dest(Dest.DB_USER).help("database user"); Argument argDbPasswd = groupDbOptions.addArgument("-p").metavar("<dbpasswd>").dest(Dest.DB_PASSWD).setDefault("").help("database password"); Argument argDbName = groupDbOptions.addArgument("-n").metavar("<dbname>").dest(Dest.DB_NAME).help("database name"); Argument argDbSid = groupDbOptions.addArgument("--db-sid").metavar("<sid>").dest(Dest.DB_SID).help("oracle sid"); Argument argDbPort = groupDbOptions.addArgument("--db-port").metavar("<port>").dest(Dest.DB_PORT).type(Integer.class).help("database port"); Argument argDbDriver = groupDbOptions.addArgument("--db-driver").metavar("<name>").dest(Dest.DB_DRIVER).help("jdbc driver class name for use with type: generic"); Argument argDbUrl = groupDbOptions.addArgument("--db-url").metavar("<jdbcUrl>").dest(Dest.DB_URL).help("jdbc url without user, passwd with type:generic"); groupDbOptions.addArgument("--jdbc-dir").metavar("<dir>").dest(Dest.JDBC_DIR).type(File.class).help("directory where jdbc driver jars are located. 
Defaults to ./jdbc"); ArgumentGroup groupPrintOptions = parser.addArgumentGroup("print options"); groupPrintOptions.addArgument("-N").metavar("<printername>").dest(Dest.PRINTER_NAME).help("name of printer"); groupPrintOptions.addArgument("-d").dest(Dest.WITH_PRINT_DIALOG).action(Arguments.storeTrue()).help("show print dialog when printing"); groupPrintOptions.addArgument("-s").metavar("<reportname>").dest(Dest.REPORT_NAME).help("set internal report/document name when printing"); allArguments.put(argDbHost.getDest(), argDbHost); allArguments.put(argDbUser.getDest(), argDbUser); allArguments.put(argDbPasswd.getDest(), argDbPasswd); allArguments.put(argDbName.getDest(), argDbName); allArguments.put(argDbSid.getDest(), argDbSid); allArguments.put(argDbPort.getDest(), argDbPort); allArguments.put(argDbDriver.getDest(), argDbDriver); allArguments.put(argDbUrl.getDest(), argDbUrl); } private Namespace parseArgumentParser(String[] args, ArgumentParser parser) { Namespace ns = null; try { ns = parser.parseArgs(args); // change some arguments to required depending on db-type if (ns.get(Dest.DB_TYPE) != null) { if (ns.get(Dest.DB_TYPE).equals(DbType.none)) { } else if (ns.get(Dest.DB_TYPE).equals(DbType.mysql)) { allArguments.get(Dest.DB_HOST).required(true); allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_NAME).required(true); allArguments.get(Dest.DB_PORT).setDefault(DbType.mysql.getPort()); } else if (ns.get(Dest.DB_TYPE).equals(DbType.postgres)) { allArguments.get(Dest.DB_HOST).required(true); allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_NAME).required(true); allArguments.get(Dest.DB_PORT).setDefault(DbType.postgres.getPort()); } else if (ns.get(Dest.DB_TYPE).equals(DbType.oracle)) { allArguments.get(Dest.DB_HOST).required(true); allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_PASSWD).required(true); allArguments.get(Dest.DB_SID).required(true); allArguments.get(Dest.DB_PORT).setDefault(DbType.oracle.getPort()); } else if (ns.get(Dest.DB_TYPE).equals(DbType.generic)) { allArguments.get(Dest.DB_USER).required(true); allArguments.get(Dest.DB_DRIVER).required(true); allArguments.get(Dest.DB_URL).required(true); } } // parse again so changed arguments become effectiv ns = parser.parseArgs(args); } catch (ArgumentParserException ex) { parser.handleError(ex); System.exit(1); } if (ns.getBoolean(Dest.DEBUG)) { System.out.print("Command line:"); for (String arg : args) { System.out.print(" " + arg); } System.out.print("\n"); System.out.println(ns); } return ns; } /** * @return the namespace */ public Namespace getNamespace() { return namespace; } }
JAS-13 expanded argument help
src/main/java/de/cenote/jasperstarter/App.java
JAS-13 expanded argument help
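The App.parseArgumentParser method in the record above uses a two-pass parsing trick with argparse4j: parse once with every db option optional, inspect the chosen db type, flip the relevant Argument objects to required, then parse again so the stricter rules are enforced. Here is a compact, self-contained sketch of that technique using only argparse4j calls that also appear in the record (newArgumentParser, addArgument, dest, setDefault, required, parseArgs, handleError); the "demo" program name and the dbType/dbHost destinations are made up for illustration.

import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.Argument;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.Namespace;

public class TwoPassParseDemo {
    public static void main(String[] args) {
        ArgumentParser parser = ArgumentParsers.newArgumentParser("demo");
        // -t is always optional and defaults to "none", mirroring App's db options.
        parser.addArgument("-t").dest("dbType").setDefault("none").help("database type");
        Argument host = parser.addArgument("-H").dest("dbHost").help("database host");
        try {
            // Pass 1: everything optional; just discover which db type was requested.
            Namespace ns = parser.parseArgs(args);
            if (!"none".equals(ns.getString("dbType"))) {
                host.required(true);         // now -H is mandatory...
                ns = parser.parseArgs(args); // ...and pass 2 enforces it
            }
            System.out.println(ns);
        } catch (ArgumentParserException ex) {
            parser.handleError(ex);
            System.exit(1);
        }
    }
}

Run with "-t mysql" and no -H and the second parse fails with the usual argparse4j error; run with no -t at all and the host stays optional. The design choice is pragmatic: argparse4j has no built-in conditional requirements, so requiredness is decided after a throwaway first parse.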
Java
apache-2.0
9234b149413ac01fd463458f63b7bf516407ba15
0
phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida
package ca.corefacility.bioinformatics.irida.ria.web.pipelines; import java.io.IOException; import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.MessageSource; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException; import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisType; import ca.corefacility.bioinformatics.irida.model.enums.ProjectRole; import ca.corefacility.bioinformatics.irida.model.joins.Join; import ca.corefacility.bioinformatics.irida.model.project.Project; import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePairSnapshot; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFileSnapshot; import ca.corefacility.bioinformatics.irida.model.user.Role; import ca.corefacility.bioinformatics.irida.model.user.User; import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow; import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription; import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter; import ca.corefacility.bioinformatics.irida.model.workflow.submission.IridaWorkflowNamedParameters; import ca.corefacility.bioinformatics.irida.ria.web.BaseController; import ca.corefacility.bioinformatics.irida.ria.web.analysis.CartController; import ca.corefacility.bioinformatics.irida.ria.web.pipelines.dto.WorkflowParametersToSave; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; import ca.corefacility.bioinformatics.irida.service.ProjectService; import ca.corefacility.bioinformatics.irida.service.ReferenceFileService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.remote.SequenceFilePairRemoteService; import ca.corefacility.bioinformatics.irida.service.remote.SingleEndSequenceFileRemoteService; import ca.corefacility.bioinformatics.irida.service.snapshot.SequenceFilePairSnapshotService; import 
ca.corefacility.bioinformatics.irida.service.snapshot.SingleEndSequenceFileSnapshotService; import ca.corefacility.bioinformatics.irida.service.user.UserService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; import ca.corefacility.bioinformatics.irida.service.workflow.WorkflowNamedParametersService; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; /** * Controller for pipeline related views * */ @Controller @Scope("session") @RequestMapping(PipelineController.BASE_URL) public class PipelineController extends BaseController { // URI's public static final String BASE_URL = "/pipelines"; /* * CONSTANTS */ private static final String DEFAULT_WORKFLOW_PARAMETERS_ID = "default"; private static final String CUSTOM_UNSAVED_WORKFLOW_PARAMETERS_ID = "custom"; public static final String URL_EMPTY_CART_REDIRECT = "redirect:/pipelines"; public static final String URL_LAUNCH = "pipelines/pipeline_selection"; public static final String URL_GENERIC_PIPELINE = "pipelines/types/generic_pipeline"; public static final String URI_LIST_PIPELINES = "/ajax/list.json"; public static final String URI_AJAX_START_PIPELINE = "/ajax/start.json"; public static final String URI_AJAX_CART_LIST = "/ajax/cart_list.json"; // JSON KEYS public static final String JSON_KEY_SAMPLE_ID = "id"; public static final String JSON_KEY_SAMPLE_OMIT_FILES_LIST = "omit"; private static final Logger logger = LoggerFactory.getLogger(PipelineController.class); /* * SERVICES */ private ReferenceFileService referenceFileService; private SequencingObjectService sequencingObjectService; private AnalysisSubmissionService analysisSubmissionService; private ProjectService projectService; private UserService userService; private IridaWorkflowsService workflowsService; private MessageSource messageSource; private final WorkflowNamedParametersService namedParameterService; private SingleEndSequenceFileRemoteService sequenceFileSingleRemoteService; private SequenceFilePairRemoteService sequenceFilePairRemoteService; private SequenceFilePairSnapshotService remoteSequenceFilePairService; private SingleEndSequenceFileSnapshotService singleEndSequenceFileSnapshotService; /* * CONTROLLERS */ private CartController cartController; @Autowired public PipelineController(SequencingObjectService sequencingObjectService, ReferenceFileService referenceFileService, AnalysisSubmissionService analysisSubmissionService, IridaWorkflowsService iridaWorkflowsService, ProjectService projectService, UserService userService, CartController cartController, MessageSource messageSource, final WorkflowNamedParametersService namedParameterService, SequenceFilePairRemoteService sequenceFilePairRemoteService, SingleEndSequenceFileRemoteService sequenceFileSingleRemoteService, SequenceFilePairSnapshotService remoteSequenceFilePairService, SingleEndSequenceFileSnapshotService singleEndSequenceFileSnapshotService) { this.sequencingObjectService = sequencingObjectService; this.referenceFileService = referenceFileService; this.analysisSubmissionService = analysisSubmissionService; this.workflowsService = iridaWorkflowsService; this.projectService = projectService; this.userService = userService; this.cartController = cartController; this.messageSource = messageSource; this.namedParameterService = namedParameterService; this.sequenceFilePairRemoteService = sequenceFilePairRemoteService; 
this.remoteSequenceFilePairService = remoteSequenceFilePairService; this.sequenceFileSingleRemoteService = sequenceFileSingleRemoteService; this.singleEndSequenceFileSnapshotService = singleEndSequenceFileSnapshotService; } /** * Get the Pipeline Selection Page * * @param model * {@link Model} * @param locale * the current user's {@link Locale} * * @return location of the pipeline selection page. */ @RequestMapping public String getPipelineLaunchPage(final Model model, Locale locale) { Set<AnalysisType> workflows = workflowsService.getRegisteredWorkflowTypes(); List<Map<String, String>> flows = new ArrayList<>(workflows.size()); workflows.stream().forEach(type -> { IridaWorkflow flow = null; try { flow = workflowsService.getDefaultWorkflowByType(type); IridaWorkflowDescription description = flow.getWorkflowDescription(); String name = type.toString(); String key = "workflow." + name; flows.add(ImmutableMap.of( "name", name, "id", description.getId().toString(), "title", messageSource .getMessage(key + ".title", null, locale), "description", messageSource .getMessage(key + ".description", null, locale) )); } catch (IridaWorkflowNotFoundException e) { logger.error("Workflow not found - See stack:", e); } }); flows.sort((f1, f2) -> f1.get("name").compareTo(f2.get("name"))); model.addAttribute("counts", getCartSummaryMap()); model.addAttribute("workflows", flows); return URL_LAUNCH; } /** * Get a generic pipeline page. * * @param model * the model for the current request * @param principal * the user in the current request * @param locale * the locale that the user is using * @param pipelineId * the pipeline to load * @return a page reference or redirect to load. */ @RequestMapping(value = "/{pipelineId}") public String getSpecifiedPipelinePage(final Model model, Principal principal, Locale locale, @PathVariable UUID pipelineId) { String response = URL_EMPTY_CART_REDIRECT; Map<Project, Set<Sample>> cartMap = cartController.getSelected(); Map<String, Sample> remoteSelected = cartController.getRemoteSelected(); // Cannot run a pipeline on an empty cart! if (!cartMap.isEmpty() || !remoteSelected.isEmpty()) { IridaWorkflow flow = null; try { flow = workflowsService.getIridaWorkflow(pipelineId); } catch (IridaWorkflowNotFoundException e) { logger.error("Workflow not found - See stack:", e); return "redirect:errors/not_found"; } User user = userService.getUserByUsername(principal.getName()); // Get all the reference files that could be used for this pipeline. List<Map<String, Object>> referenceFileList = new ArrayList<>(); List<Map<String, Object>> projectList = new ArrayList<>(); List<Map<String, Object>> addRefList = new ArrayList<>(); IridaWorkflowDescription description = flow.getWorkflowDescription(); for (Project project : cartMap.keySet()) { // Check to see if it requires a reference file. 
if (description.requiresReference()) { List<Join<Project, ReferenceFile>> joinList = referenceFileService .getReferenceFilesForProject(project); for (Join<Project, ReferenceFile> join : joinList) { referenceFileList.add(ImmutableMap.of( "project", project, "file", join.getObject() )); } if (referenceFileList.size() == 0) { if (user.getSystemRole().equals(Role.ROLE_ADMIN) || projectService .userHasProjectRole(user, project, ProjectRole.PROJECT_OWNER)) { addRefList.add(ImmutableMap.of( "name", project.getLabel(), "id", project.getId() )); } } } Set<Sample> samples = cartMap.get(project); Map<String, Object> projectMap = new HashMap<>(); List<Map<String, Object>> sampleList = new ArrayList<>(); for (Sample sample : samples) { Map<String, Object> sampleMap = new HashMap<>(); sampleMap.put("name", sample.getLabel()); sampleMap.put("id", sample.getId().toString()); Map<String, List<? extends Object>> files = new HashMap<>(); // Paired end reads if (description.acceptsPairedSequenceFiles()) { Collection<SampleSequencingObjectJoin> pairs = sequencingObjectService.getSequencesForSampleOfType(sample, SequenceFilePair.class); files.put("paired_end", pairs.stream().map(SampleSequencingObjectJoin::getObject).collect(Collectors.toList())); } // Single end reads if (description.acceptsSingleSequenceFiles()) { Collection<SampleSequencingObjectJoin> singles = sequencingObjectService.getSequencesForSampleOfType(sample, SingleEndSequenceFile.class); files.put("single_end", singles.stream().map(SampleSequencingObjectJoin::getObject) .collect(Collectors.toList())); } sampleMap.put("files", files); sampleList.add(sampleMap); } projectMap.put("id", project.getId().toString()); projectMap.put("name", project.getLabel()); projectMap.put("samples", sampleList); projectList.add(projectMap); } /* * Add remote samples */ List<Map<String, Object>> remoteSamples = new ArrayList<>(); logger.trace("Getting remote files for samples in cart"); for(String url : remoteSelected.keySet()){ Sample sample = remoteSelected.get(url); Map<String, Object> sampleMap = new HashMap<>(); sampleMap.put("name", sample.getLabel()); sampleMap.put("id", sample.getSelfHref()); Map<String, List<? extends Object>> files = new HashMap<>(); if (description.acceptsPairedSequenceFiles()) { logger.trace("Getting remote pairs for sample " + url); files.put("paired_end", sequenceFilePairRemoteService.getSequenceFilePairsForSample(sample)); } if (description.acceptsSingleSequenceFiles()) { logger.trace("Getting remote single files for sample " + url); files.put("single_end", sequenceFileSingleRemoteService.getUnpairedFilesForSample(sample)); } sampleMap.put("files", files); remoteSamples.add(sampleMap); } // Need to add the pipeline parameters final List<IridaWorkflowParameter> defaultWorkflowParameters = flow.getWorkflowDescription().getParameters(); final List<Map<String, Object>> parameters = new ArrayList<>(); if (defaultWorkflowParameters != null) { final List<Map<String, String>> defaultParameters = new ArrayList<>(); final String workflowName = description.getName().toLowerCase(); for (IridaWorkflowParameter p : defaultWorkflowParameters) { defaultParameters.add(ImmutableMap.of( "label", messageSource.getMessage("pipeline.parameters." + workflowName + "." 
+ p.getName(), null, locale), "value", p.getDefaultValue(), "name", p.getName() )); } parameters.add(ImmutableMap.of("id", DEFAULT_WORKFLOW_PARAMETERS_ID, "label", messageSource.getMessage("workflow.parameters.named.default", null, locale), "parameters", defaultParameters)); final List<IridaWorkflowNamedParameters> namedParameters = namedParameterService.findNamedParametersForWorkflow(pipelineId); for (final IridaWorkflowNamedParameters p : namedParameters) { final List<Map<String, String>> namedParametersList = new ArrayList<>(); for (final Map.Entry<String, String> parameter : p.getInputParameters().entrySet()) { namedParametersList.add(ImmutableMap.of( "label", messageSource.getMessage("pipeline.parameters." + workflowName + "." + parameter.getKey(), null, locale), "value", parameter.getValue(), "name", parameter.getKey() )); } parameters.add(ImmutableMap.of("id", p.getId(), "label", p.getLabel(), "parameters", namedParametersList)); } model.addAttribute("parameters", parameters); model.addAttribute("parameterModalTitle", messageSource.getMessage("pipeline.parameters.modal-title." + workflowName, null, locale)); } else { model.addAttribute("noParameters", messageSource.getMessage("pipeline.no-parameters", null, locale)); } model.addAttribute("title", messageSource.getMessage("pipeline.title." + description.getName(), null, locale)); model.addAttribute("mainTitle", messageSource.getMessage("pipeline.h1." + description.getName(), null, locale)); model.addAttribute("name", description.getName()); model.addAttribute("pipelineId", pipelineId.toString()); model.addAttribute("referenceFiles", referenceFileList); model.addAttribute("referenceRequired", description.requiresReference()); model.addAttribute("addRefProjects", addRefList); model.addAttribute("projects", projectList); model.addAttribute("remoteSamples", remoteSamples); response = URL_GENERIC_PIPELINE; } return response; } // ************************************************************************************************ // AJAX // ************************************************************************************************ /** * Launch a pipeline * * @param locale * the locale that the browser is using for the current request. * @param pipelineId * the id for the {@link IridaWorkflow} * @param single * a list of {@link SequenceFile} id's * @param paired * a list of {@link SequenceFilePair} id's * @param remoteSingle * a list of remote {@link SequenceFile} URLs * @param remotePaired * A list of remote {@link SequenceFilePair} URLs * @param parameters * TODO: This is a hack! Update when fixing issue #100 * {@link Map} of ALL parameters passed. Only want the 'paras' * object: a {@link Map} of pipeline parameters * @param ref * the id for a {@link ReferenceFile} * @param name * a user provided name for the {@link IridaWorkflow} * @param analysisDescription * Optional description of the analysis * * @return a JSON response with the status and any messages. 
*/ @RequestMapping(value = "/ajax/start/{pipelineId}", method = RequestMethod.POST) public @ResponseBody Map<String, Object> ajaxStartPipeline(Locale locale, @PathVariable UUID pipelineId, @RequestParam(required = false) List<Long> single, @RequestParam(required = false) List<Long> paired, @RequestParam(required = false) List<String> remoteSingle, @RequestParam(required = false) List<String> remotePaired, @RequestParam(required = false) Map<String, String> parameters, @RequestParam(required = false) Long ref, @RequestParam String name, @RequestParam(name = "description", required = false) String analysisDescription, @RequestParam(required = false) List<Long> sharedProjects) { Map<String, Object> result = ImmutableMap.of("success", true); try { IridaWorkflow flow = workflowsService.getIridaWorkflow(pipelineId); IridaWorkflowDescription description = flow.getWorkflowDescription(); // The pipeline needs to have a name. if (Strings.isNullOrEmpty(name)) { return ImmutableMap .of("error", messageSource.getMessage("workflow.no-name-provided", null, locale)); } // Check to see if a reference file is required. if (description.requiresReference() && ref == null) { return ImmutableMap.of("error", messageSource.getMessage("pipeline.error.no-reference.pipeline-start", null, locale)); } // Get a list of the files to submit List<SingleEndSequenceFile> singleEndFiles = new ArrayList<>(); List<SequenceFilePair> sequenceFilePairs = new ArrayList<>(); if (single != null) { Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(single); readMultiple.forEach(f -> { if (!(f instanceof SingleEndSequenceFile)) { throw new IllegalArgumentException("file " + f.getId() + " not a single end file"); } singleEndFiles.add((SingleEndSequenceFile) f); }); // Check the single files for duplicates in a sample, throws SampleAnalysisDuplicateException sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(singleEndFiles)); } if (paired != null) { Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(paired); readMultiple.forEach(f -> { if (!(f instanceof SequenceFilePair)) { throw new IllegalArgumentException("file " + f.getId() + " not a paired end file"); } sequenceFilePairs.add((SequenceFilePair) f); }); // Check the pair files for duplicates in a sample, throws SampleAnalysisDuplicateException sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(sequenceFilePairs)); } // Get a list of the remote files to submit List<SingleEndSequenceFileSnapshot> remoteSingleFiles = new ArrayList<>(); List<SequenceFilePairSnapshot> remotePairFiles = new ArrayList<>(); if(remoteSingle != null){ logger.debug("Mirroring " + remoteSingle.size() + " single files."); remoteSingleFiles = remoteSingle.stream().map((u) -> { SingleEndSequenceFile read = sequenceFileSingleRemoteService.read(u); return singleEndSequenceFileSnapshotService.mirrorFile(read); }).collect(Collectors.toList()); } if(remotePaired != null){ logger.debug("Mirroring " + remotePaired.size() + " pairs."); remotePairFiles = remotePaired.stream().map((u) -> { SequenceFilePair pair = sequenceFilePairRemoteService.read(u); return remoteSequenceFilePairService.mirrorPair(pair); }).collect(Collectors.toList()); } // Get the pipeline parameters Map<String, String> params = new HashMap<>(); IridaWorkflowNamedParameters namedParameters = null; if (parameters.containsKey("selectedParameters")) { try { final Map<String, Object> passedParameters = extractPipelineParameters(parameters 
.get("selectedParameters")); // we should only have *one* parameter set supplied. final String selectedParametersId = passedParameters.get("id").toString(); if (!DEFAULT_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId) && !CUSTOM_UNSAVED_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId)) { // this means that a named parameter set was selected // and unmodified, so load up that named parameter set // to pass along. namedParameters = namedParameterService.read(Long.valueOf(selectedParametersId)); } else { @SuppressWarnings("unchecked") final List<Map<String, String>> unnamedParameters = (List<Map<String, String>>) passedParameters.get("parameters"); for (final Map<String, String> parameter : unnamedParameters) { params.put(parameter.get("name"), parameter.get("value")); } } } catch (final IOException e) { return ImmutableMap .of("parameterError", messageSource.getMessage("pipeline.parameters.error", null, locale)); } } List<Project> projectsToShare = new ArrayList<>(); if (sharedProjects != null && !sharedProjects.isEmpty()) { projectsToShare = Lists.newArrayList(projectService.readMultiple(sharedProjects)); } if (description.getInputs().requiresSingleSample()) { analysisSubmissionService.createSingleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, remoteSingleFiles, remotePairFiles, params, namedParameters, name, analysisDescription, projectsToShare); } else { analysisSubmissionService.createMultipleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, remoteSingleFiles, remotePairFiles, params, namedParameters, name, analysisDescription, projectsToShare); } } catch (IridaWorkflowNotFoundException e) { logger.error("Cannot file IridaWorkflow [" + pipelineId + "]", e); result = ImmutableMap .of("pipelineError", messageSource.getMessage("pipeline.error.invalid-pipeline", null, locale)); } catch (DuplicateSampleException e) { logger.error("Multiple files for Sample found", e); result = ImmutableMap.of("pipelineError", messageSource.getMessage("pipeline.error.duplicate-samples", null, locale)); } return result; } /** * Save a set of {@link IridaWorkflowNamedParameters} and respond with the * ID that we saved the new set with. * * @param params * the DTO with the parameters to save. * @return a map with the ID of the saved named parameters. */ @RequestMapping(value = "/ajax/parameters", method = RequestMethod.POST) public @ResponseBody Map<String, Object> ajaxSaveParameters(@RequestBody final WorkflowParametersToSave params) { final IridaWorkflowNamedParameters namedParameters = namedParameterService.create(params.namedParameters()); return ImmutableMap.of("id", namedParameters.getId()); } /** * Extract {@link IridaWorkflow} parameters from the request {@link Map} * * @param mapString * {@link Map} of parameters * * @return {@link Map} of parameters for the pipeline * @throws IOException * when unable to parse the parameters from the provided string. */ @SuppressWarnings("unchecked") private Map<String, Object> extractPipelineParameters(String mapString) throws IOException { // TODO [15-02-16] (Josh): Update when addressing issue #100 ObjectMapper mapper = new ObjectMapper(); try { return mapper.readValue(mapString, Map.class); } catch (IOException e) { logger.error("Error extracting parameters from submission", e); throw e; } } /** * Get details about the contents of the cart. * * @return {@link Map} containing the counts of the projects and samples in the cart. 
*/ private Map<String, Integer> getCartSummaryMap() { return ImmutableMap.of( "projects", cartController.getNumberOfProjects(), "samples", cartController.getNumberOfSamples() ); } }
src/main/java/ca/corefacility/bioinformatics/irida/ria/web/pipelines/PipelineController.java
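The controller's extractPipelineParameters method above deserializes the "selectedParameters" JSON blob into a raw Map with Jackson's ObjectMapper, then the caller pulls out the "id" and the "parameters" list. Below is a minimal sketch of that round trip with a made-up payload; the "coverage" parameter name and its value are hypothetical, and the unchecked cast mirrors the controller's own @SuppressWarnings usage.

import java.io.IOException;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ParameterExtractionDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws IOException {
        // Shaped like the controller's "selectedParameters" payload.
        String json = "{\"id\":\"custom\",\"parameters\":"
                + "[{\"name\":\"coverage\",\"value\":\"30\"}]}";
        ObjectMapper mapper = new ObjectMapper();
        // Jackson maps a JSON object to a Map and a JSON array to a List.
        Map<String, Object> parsed = mapper.readValue(json, Map.class);
        String id = parsed.get("id").toString();
        List<Map<String, String>> params =
                (List<Map<String, String>>) parsed.get("parameters");
        System.out.println(id + " -> " + params); // custom -> [{name=coverage, value=30}]
    }
}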
package ca.corefacility.bioinformatics.irida.ria.web.pipelines; import java.io.IOException; import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.MessageSource; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException; import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisType; import ca.corefacility.bioinformatics.irida.model.enums.ProjectRole; import ca.corefacility.bioinformatics.irida.model.joins.Join; import ca.corefacility.bioinformatics.irida.model.project.Project; import ca.corefacility.bioinformatics.irida.model.project.ReferenceFile; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePairSnapshot; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFileSnapshot; import ca.corefacility.bioinformatics.irida.model.user.Role; import ca.corefacility.bioinformatics.irida.model.user.User; import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow; import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription; import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter; import ca.corefacility.bioinformatics.irida.model.workflow.submission.IridaWorkflowNamedParameters; import ca.corefacility.bioinformatics.irida.ria.web.BaseController; import ca.corefacility.bioinformatics.irida.ria.web.analysis.CartController; import ca.corefacility.bioinformatics.irida.ria.web.pipelines.dto.WorkflowParametersToSave; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; import ca.corefacility.bioinformatics.irida.service.ProjectService; import ca.corefacility.bioinformatics.irida.service.ReferenceFileService; import ca.corefacility.bioinformatics.irida.service.SequencingObjectService; import ca.corefacility.bioinformatics.irida.service.remote.SequenceFilePairRemoteService; import ca.corefacility.bioinformatics.irida.service.remote.SingleEndSequenceFileRemoteService; import ca.corefacility.bioinformatics.irida.service.snapshot.SequenceFilePairSnapshotService; import 
ca.corefacility.bioinformatics.irida.service.snapshot.SingleEndSequenceFileSnapshotService; import ca.corefacility.bioinformatics.irida.service.user.UserService; import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService; import ca.corefacility.bioinformatics.irida.service.workflow.WorkflowNamedParametersService; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; /** * Controller for pipeline related views * */ @Controller @Scope("session") @RequestMapping(PipelineController.BASE_URL) public class PipelineController extends BaseController { // URI's public static final String BASE_URL = "/pipelines"; /* * CONSTANTS */ private static final String DEFAULT_WORKFLOW_PARAMETERS_ID = "default"; private static final String CUSTOM_UNSAVED_WORKFLOW_PARAMETERS_ID = "custom"; public static final String URL_EMPTY_CART_REDIRECT = "redirect:/pipelines"; public static final String URL_LAUNCH = "pipelines/pipeline_selection"; public static final String URL_GENERIC_PIPELINE = "pipelines/types/generic_pipeline"; public static final String URI_LIST_PIPELINES = "/ajax/list.json"; public static final String URI_AJAX_START_PIPELINE = "/ajax/start.json"; public static final String URI_AJAX_CART_LIST = "/ajax/cart_list.json"; // JSON KEYS public static final String JSON_KEY_SAMPLE_ID = "id"; public static final String JSON_KEY_SAMPLE_OMIT_FILES_LIST = "omit"; private static final Logger logger = LoggerFactory.getLogger(PipelineController.class); /* * SERVICES */ private ReferenceFileService referenceFileService; private SequencingObjectService sequencingObjectService; private AnalysisSubmissionService analysisSubmissionService; private ProjectService projectService; private UserService userService; private IridaWorkflowsService workflowsService; private MessageSource messageSource; private final WorkflowNamedParametersService namedParameterService; private SingleEndSequenceFileRemoteService sequenceFileSingleRemoteService; private SequenceFilePairRemoteService sequenceFilePairRemoteService; private SequenceFilePairSnapshotService remoteSequenceFilePairService; private SingleEndSequenceFileSnapshotService singleEndSequenceFileSnapshotService; /* * CONTROLLERS */ private CartController cartController; @Autowired public PipelineController(SequencingObjectService sequencingObjectService, ReferenceFileService referenceFileService, AnalysisSubmissionService analysisSubmissionService, IridaWorkflowsService iridaWorkflowsService, ProjectService projectService, UserService userService, CartController cartController, MessageSource messageSource, final WorkflowNamedParametersService namedParameterService, SequenceFilePairRemoteService sequenceFilePairRemoteService, SingleEndSequenceFileRemoteService sequenceFileSingleRemoteService, SequenceFilePairSnapshotService remoteSequenceFilePairService, SingleEndSequenceFileSnapshotService singleEndSequenceFileSnapshotService) { this.sequencingObjectService = sequencingObjectService; this.referenceFileService = referenceFileService; this.analysisSubmissionService = analysisSubmissionService; this.workflowsService = iridaWorkflowsService; this.projectService = projectService; this.userService = userService; this.cartController = cartController; this.messageSource = messageSource; this.namedParameterService = namedParameterService; this.sequenceFilePairRemoteService = sequenceFilePairRemoteService; 
this.remoteSequenceFilePairService = remoteSequenceFilePairService; this.sequenceFileSingleRemoteService = sequenceFileSingleRemoteService; this.singleEndSequenceFileSnapshotService = singleEndSequenceFileSnapshotService; } /** * Get the Pipeline Selection Page * * @param model * {@link Model} * @param locale * Current user's {@link Locale} * * @return location of the pipeline selection page. */ @RequestMapping public String getPipelineLaunchPage(final Model model, Locale locale) { Set<AnalysisType> workflows = workflowsService.getRegisteredWorkflowTypes(); List<Map<String, String>> flows = new ArrayList<>(workflows.size()); workflows.stream().forEach(type -> { IridaWorkflow flow = null; try { flow = workflowsService.getDefaultWorkflowByType(type); IridaWorkflowDescription description = flow.getWorkflowDescription(); String name = type.toString(); String key = "workflow." + name; flows.add(ImmutableMap.of( "name", name, "id", description.getId().toString(), "title", messageSource .getMessage(key + ".title", null, locale), "description", messageSource .getMessage(key + ".description", null, locale) )); } catch (IridaWorkflowNotFoundException e) { logger.error("Workflow not found - See stack:", e); } }); flows.sort((f1, f2) -> f1.get("name").compareTo(f2.get("name"))); model.addAttribute("counts", getCartSummaryMap()); model.addAttribute("workflows", flows); return URL_LAUNCH; } /** * Get a generic pipeline page. * * @param model * the model for the current request * @param principal * the user in the current request * @param locale * the locale that the user is using * @param pipelineId * the pipeline to load * @return a page reference or redirect to load. */ @RequestMapping(value = "/{pipelineId}") public String getSpecifiedPipelinePage(final Model model, Principal principal, Locale locale, @PathVariable UUID pipelineId) { String response = URL_EMPTY_CART_REDIRECT; Map<Project, Set<Sample>> cartMap = cartController.getSelected(); Map<String, Sample> remoteSelected = cartController.getRemoteSelected(); // Cannot run a pipeline on an empty cart! if (!cartMap.isEmpty() || !remoteSelected.isEmpty()) { IridaWorkflow flow = null; try { flow = workflowsService.getIridaWorkflow(pipelineId); } catch (IridaWorkflowNotFoundException e) { logger.error("Workflow not found - See stack:", e); return "redirect:errors/not_found"; } User user = userService.getUserByUsername(principal.getName()); // Get all the reference files that could be used for this pipeline. List<Map<String, Object>> referenceFileList = new ArrayList<>(); List<Map<String, Object>> projectList = new ArrayList<>(); List<Map<String, Object>> addRefList = new ArrayList<>(); IridaWorkflowDescription description = flow.getWorkflowDescription(); for (Project project : cartMap.keySet()) { // Check to see if it requires a reference file.
if (description.requiresReference()) { List<Join<Project, ReferenceFile>> joinList = referenceFileService .getReferenceFilesForProject(project); for (Join<Project, ReferenceFile> join : joinList) { referenceFileList.add(ImmutableMap.of( "project", project, "file", join.getObject() )); } if (referenceFileList.size() == 0) { if (user.getSystemRole().equals(Role.ROLE_ADMIN) || projectService .userHasProjectRole(user, project, ProjectRole.PROJECT_OWNER)) { addRefList.add(ImmutableMap.of( "name", project.getLabel(), "id", project.getId() )); } } } Set<Sample> samples = cartMap.get(project); Map<String, Object> projectMap = new HashMap<>(); List<Map<String, Object>> sampleList = new ArrayList<>(); for (Sample sample : samples) { Map<String, Object> sampleMap = new HashMap<>(); sampleMap.put("name", sample.getLabel()); sampleMap.put("id", sample.getId().toString()); Map<String, List<? extends Object>> files = new HashMap<>(); // Paired end reads if (description.acceptsPairedSequenceFiles()) { Collection<SampleSequencingObjectJoin> pairs = sequencingObjectService.getSequencesForSampleOfType(sample, SequenceFilePair.class); files.put("paired_end", pairs.stream().map(SampleSequencingObjectJoin::getObject).collect(Collectors.toList())); } // Single end reads if (description.acceptsSingleSequenceFiles()) { Collection<SampleSequencingObjectJoin> singles = sequencingObjectService.getSequencesForSampleOfType(sample, SingleEndSequenceFile.class); files.put("single_end", singles.stream().map(SampleSequencingObjectJoin::getObject) .collect(Collectors.toList())); } sampleMap.put("files", files); sampleList.add(sampleMap); } projectMap.put("id", project.getId().toString()); projectMap.put("name", project.getLabel()); projectMap.put("samples", sampleList); projectList.add(projectMap); } /* * Add remote samples */ List<Map<String, Object>> remoteSamples = new ArrayList<>(); logger.trace("Getting remote files for samples in cart"); for (String url : remoteSelected.keySet()) { Sample sample = remoteSelected.get(url); Map<String, Object> sampleMap = new HashMap<>(); sampleMap.put("name", sample.getLabel()); sampleMap.put("id", sample.getSelfHref()); Map<String, List<? extends Object>> files = new HashMap<>(); if (description.acceptsPairedSequenceFiles()) { logger.trace("Getting remote pairs for sample " + url); files.put("paired_end", sequenceFilePairRemoteService.getSequenceFilePairsForSample(sample)); } if (description.acceptsSingleSequenceFiles()) { logger.trace("Getting remote single files for sample " + url); files.put("single_end", sequenceFileSingleRemoteService.getUnpairedFilesForSample(sample)); } sampleMap.put("files", files); remoteSamples.add(sampleMap); } // Need to add the pipeline parameters final List<IridaWorkflowParameter> defaultWorkflowParameters = flow.getWorkflowDescription().getParameters(); final List<Map<String, Object>> parameters = new ArrayList<>(); if (defaultWorkflowParameters != null) { final List<Map<String, String>> defaultParameters = new ArrayList<>(); final String workflowName = description.getName().toLowerCase(); for (IridaWorkflowParameter p : defaultWorkflowParameters) { defaultParameters.add(ImmutableMap.of( "label", messageSource.getMessage("pipeline.parameters." + workflowName + "."
+ p.getName(), null, locale), "value", p.getDefaultValue(), "name", p.getName() )); } parameters.add(ImmutableMap.of("id", DEFAULT_WORKFLOW_PARAMETERS_ID, "label", messageSource.getMessage("workflow.parameters.named.default", null, locale), "parameters", defaultParameters)); final List<IridaWorkflowNamedParameters> namedParameters = namedParameterService.findNamedParametersForWorkflow(pipelineId); for (final IridaWorkflowNamedParameters p : namedParameters) { final List<Map<String, String>> namedParametersList = new ArrayList<>(); for (final Map.Entry<String, String> parameter : p.getInputParameters().entrySet()) { namedParametersList.add(ImmutableMap.of( "label", messageSource.getMessage("pipeline.parameters." + workflowName + "." + parameter.getKey(), null, locale), "value", parameter.getValue(), "name", parameter.getKey() )); } parameters.add(ImmutableMap.of("id", p.getId(), "label", p.getLabel(), "parameters", namedParametersList)); } model.addAttribute("parameters", parameters); model.addAttribute("parameterModalTitle", messageSource.getMessage("pipeline.parameters.modal-title." + workflowName, null, locale)); } else { model.addAttribute("noParameters", messageSource.getMessage("pipeline.no-parameters", null, locale)); } model.addAttribute("title", messageSource.getMessage("pipeline.title." + description.getName(), null, locale)); model.addAttribute("mainTitle", messageSource.getMessage("pipeline.h1." + description.getName(), null, locale)); model.addAttribute("name", description.getName()); model.addAttribute("pipelineId", pipelineId.toString()); model.addAttribute("referenceFiles", referenceFileList); model.addAttribute("referenceRequired", description.requiresReference()); model.addAttribute("addRefProjects", addRefList); model.addAttribute("projects", projectList); model.addAttribute("remoteSamples", remoteSamples); response = URL_GENERIC_PIPELINE; } return response; } // ************************************************************************************************ // AJAX // ************************************************************************************************ /** * Launch a pipeline * * @param locale * the locale that the browser is using for the current request. * @param pipelineId * the id for the {@link IridaWorkflow} * @param single * a list of {@link SequenceFile} id's * @param paired * a list of {@link SequenceFilePair} id's * @param remoteSingle * a list of remote {@link SequenceFile} URLs * @param remotePaired * A list of remote {@link SequenceFilePair} URLs * @param parameters * TODO: This is a hack! Update when fixing issue #100 * {@link Map} of ALL parameters passed. Only want the 'paras' * object: a {@link Map} of pipeline parameters * @param ref * the id for a {@link ReferenceFile} * @param name * a user provided name for the {@link IridaWorkflow} * @param analysisDescription * Optional description of the analysis * * @return a JSON response with the status and any messages. 
*/ @RequestMapping(value = "/ajax/start/{pipelineId}", method = RequestMethod.POST) public @ResponseBody Map<String, Object> ajaxStartPipeline(Locale locale, @PathVariable UUID pipelineId, @RequestParam(required = false) List<Long> single, @RequestParam(required = false) List<Long> paired, @RequestParam(required = false) List<String> remoteSingle, @RequestParam(required = false) List<String> remotePaired, @RequestParam(required = false) Map<String, String> parameters, @RequestParam(required = false) Long ref, @RequestParam String name, @RequestParam(name = "description", required = false) String analysisDescription, @RequestParam(required = false) List<Long> sharedProjects) { Map<String, Object> result = ImmutableMap.of("success", true); try { IridaWorkflow flow = workflowsService.getIridaWorkflow(pipelineId); IridaWorkflowDescription description = flow.getWorkflowDescription(); // The pipeline needs to have a name. if (Strings.isNullOrEmpty(name)) { return ImmutableMap .of("error", messageSource.getMessage("workflow.no-name-provided", null, locale)); } // Check to see if a reference file is required. if (description.requiresReference() && ref == null) { return ImmutableMap.of("error", messageSource.getMessage("pipeline.error.no-reference.pipeline-start", null, locale)); } // Get a list of the files to submit List<SingleEndSequenceFile> singleEndFiles = new ArrayList<>(); List<SequenceFilePair> sequenceFilePairs = new ArrayList<>(); if (single != null) { Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(single); readMultiple.forEach(f -> { if (!(f instanceof SingleEndSequenceFile)) { throw new IllegalArgumentException("file " + f.getId() + " not a single end file"); } singleEndFiles.add((SingleEndSequenceFile) f); }); // Check the single files for duplicates in a sample, throws SampleAnalysisDuplicateException sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(singleEndFiles)); } if (paired != null) { Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(paired); readMultiple.forEach(f -> { if (!(f instanceof SequenceFilePair)) { throw new IllegalArgumentException("file " + f.getId() + " not a sequence file pair"); } sequenceFilePairs.add((SequenceFilePair) f); }); // Check the pair files for duplicates in a sample, throws SampleAnalysisDuplicateException sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(sequenceFilePairs)); } // Get a list of the remote files to submit List<SingleEndSequenceFileSnapshot> remoteSingleFiles = new ArrayList<>(); List<SequenceFilePairSnapshot> remotePairFiles = new ArrayList<>(); if (remoteSingle != null) { logger.debug("Mirroring " + remoteSingle.size() + " single files."); remoteSingleFiles = remoteSingle.stream().map((u) -> { SingleEndSequenceFile read = sequenceFileSingleRemoteService.read(u); return singleEndSequenceFileSnapshotService.mirrorFile(read); }).collect(Collectors.toList()); } if (remotePaired != null) { logger.debug("Mirroring " + remotePaired.size() + " pairs."); remotePairFiles = remotePaired.stream().map((u) -> { SequenceFilePair pair = sequenceFilePairRemoteService.read(u); return remoteSequenceFilePairService.mirrorPair(pair); }).collect(Collectors.toList()); } // Get the pipeline parameters Map<String, String> params = new HashMap<>(); IridaWorkflowNamedParameters namedParameters = null; if (parameters.containsKey("selectedParameters")) { try { final Map<String, Object> passedParameters = extractPipelineParameters(parameters
.get("selectedParameters")); // we should only have *one* parameter set supplied. final String selectedParametersId = passedParameters.get("id").toString(); if (!DEFAULT_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId) && !CUSTOM_UNSAVED_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId)) { // this means that a named parameter set was selected // and unmodified, so load up that named parameter set // to pass along. namedParameters = namedParameterService.read(Long.valueOf(selectedParametersId)); } else { @SuppressWarnings("unchecked") final List<Map<String, String>> unnamedParameters = (List<Map<String, String>>) passedParameters.get("parameters"); for (final Map<String, String> parameter : unnamedParameters) { params.put(parameter.get("name"), parameter.get("value")); } } } catch (final IOException e) { return ImmutableMap .of("parameterError", messageSource.getMessage("pipeline.parameters.error", null, locale)); } } List<Project> projectsToShare = Lists.newArrayList(projectService.readMultiple(sharedProjects)); if (description.getInputs().requiresSingleSample()) { analysisSubmissionService.createSingleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, remoteSingleFiles, remotePairFiles, params, namedParameters, name, analysisDescription, projectsToShare); } else { analysisSubmissionService.createMultipleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, remoteSingleFiles, remotePairFiles, params, namedParameters, name, analysisDescription, projectsToShare); } } catch (IridaWorkflowNotFoundException e) { logger.error("Cannot file IridaWorkflow [" + pipelineId + "]", e); result = ImmutableMap .of("pipelineError", messageSource.getMessage("pipeline.error.invalid-pipeline", null, locale)); } catch (DuplicateSampleException e) { logger.error("Multiple files for Sample found", e); result = ImmutableMap.of("pipelineError", messageSource.getMessage("pipeline.error.duplicate-samples", null, locale)); } return result; } /** * Save a set of {@link IridaWorkflowNamedParameters} and respond with the * ID that we saved the new set with. * * @param params * the DTO with the parameters to save. * @return a map with the ID of the saved named parameters. */ @RequestMapping(value = "/ajax/parameters", method = RequestMethod.POST) public @ResponseBody Map<String, Object> ajaxSaveParameters(@RequestBody final WorkflowParametersToSave params) { final IridaWorkflowNamedParameters namedParameters = namedParameterService.create(params.namedParameters()); return ImmutableMap.of("id", namedParameters.getId()); } /** * Extract {@link IridaWorkflow} parameters from the request {@link Map} * * @param mapString * {@link Map} of parameters * * @return {@link Map} of parameters for the pipeline * @throws IOException * when unable to parse the parameters from the provided string. */ @SuppressWarnings("unchecked") private Map<String, Object> extractPipelineParameters(String mapString) throws IOException { // TODO [15-02-16] (Josh): Update when addressing issue #100 ObjectMapper mapper = new ObjectMapper(); try { return mapper.readValue(mapString, Map.class); } catch (IOException e) { logger.error("Error extracting parameters from submission", e); throw e; } } /** * Get details about the contents of the cart. * * @return {@link Map} containing the counts of the projects and samples in the cart. */ private Map<String, Integer> getCartSummaryMap() { return ImmutableMap.of( "projects", cartController.getNumberOfProjects(), "samples", cartController.getNumberOfSamples() ); } }
fixed nullpointer when regular user was submitting analysis due to project sharing
src/main/java/ca/corefacility/bioinformatics/irida/ria/web/pipelines/PipelineController.java
fixed nullpointer when regular user was submitting analysis due to project sharing
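The patched new_contents for this record is not reproduced above, so the exact change is not visible. What follows is a minimal sketch of the kind of guard the commit message describes, assuming the NullPointerException came from passing a null sharedProjects list (a regular user without sharing permissions submits no project ids) straight into projectService.readMultiple; the helper method name is hypothetical:

// Hypothetical guard consistent with the commit message; the actual patched
// code is not shown in this record. Intended to replace the direct call
// Lists.newArrayList(projectService.readMultiple(sharedProjects)) above.
private List<Project> resolveProjectsToShare(List<Long> sharedProjects) {
	if (sharedProjects == null || sharedProjects.isEmpty()) {
		// Nothing selected (or no permission to share): avoid readMultiple(null).
		return new ArrayList<>();
	}
	return Lists.newArrayList(projectService.readMultiple(sharedProjects));
}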
Java
apache-2.0
b13577371770916ef7aadca5f6326723a6adf8e2
0
nojustiniano/HolandaCatalinaFw,javaito/HolandaCatalinaFw,javaito/HolandaCatalinaFw,kevchuk/HolandaCatalinaFw,javaito/HolandaCatalinaFw,nojustiniano/HolandaCatalinaFw
package org.hcjf.encoding; import java.util.HashMap; import java.util.Map; /** * Contains common mime types. * @author javaito */ public class MimeType { public static final MimeType APPLICATION_ACAD = new MimeType("application/acad"); public static final MimeType APPLICATION_ARJ = new MimeType("application/arj"); public static final MimeType APPLICATION_BASE64 = new MimeType("application/base64"); public static final MimeType APPLICATION_BINHEX = new MimeType("application/binhex"); public static final MimeType APPLICATION_BINHEX4 = new MimeType("application/binhex4"); public static final MimeType APPLICATION_BOOK = new MimeType("application/book"); public static final MimeType APPLICATION_BSON = new MimeType("application/bson"); public static final MimeType APPLICATION_CDF = new MimeType("application/cdf"); public static final MimeType APPLICATION_CLARISCAD = new MimeType("application/clariscad"); public static final MimeType APPLICATION_COMMONGROUND = new MimeType("application/commonground"); public static final MimeType APPLICATION_DRAFTING = new MimeType("application/drafting"); public static final MimeType APPLICATION_DSPTYPE = new MimeType("application/dsptype"); public static final MimeType APPLICATION_DXF = new MimeType("application/dxf"); public static final MimeType APPLICATION_ECMASCRIPT = new MimeType("application/ecmascript"); public static final MimeType APPLICATION_ENVOY = new MimeType("application/envoy"); public static final MimeType APPLICATION_EXCEL = new MimeType("application/excel"); public static final MimeType APPLICATION_FRACTALS = new MimeType("application/fractals"); public static final MimeType APPLICATION_FREELOADER = new MimeType("application/freeloader"); public static final MimeType APPLICATION_FUTURESPLASH = new MimeType("application/futuresplash"); public static final MimeType APPLICATION_GNUTAR = new MimeType("application/gnutar"); public static final MimeType APPLICATION_GROUPWISE = new MimeType("application/groupwise"); public static final MimeType APPLICATION_HLP = new MimeType("application/hlp"); public static final MimeType APPLICATION_HTA = new MimeType("application/hta"); public static final MimeType APPLICATION_IGES = new MimeType("application/iges"); public static final MimeType APPLICATION_INF = new MimeType("application/inf"); public static final MimeType APPLICATION_I_DEAS = new MimeType("application/i-deas"); public static final MimeType APPLICATION_JAVA = new MimeType("application/java"); public static final MimeType APPLICATION_JAVASCRIPT = new MimeType("application/javascript"); public static final MimeType APPLICATION_JAVA_BYTE_CODE = new MimeType("application/java-byte-code"); public static final MimeType APPLICATION_JSON = new MimeType("application/json"); public static final MimeType APPLICATION_LHA = new MimeType("application/lha"); public static final MimeType APPLICATION_LZX = new MimeType("application/lzx"); public static final MimeType APPLICATION_MACBINARY = new MimeType("application/macbinary"); public static final MimeType APPLICATION_MAC_BINARY = new MimeType("application/mac-binary"); public static final MimeType APPLICATION_MAC_BINHEX = new MimeType("application/mac-binhex"); public static final MimeType APPLICATION_MAC_BINHEX40 = new MimeType("application/mac-binhex40"); public static final MimeType APPLICATION_MAC_COMPACTPRO = new MimeType("application/mac-compactpro"); public static final MimeType APPLICATION_MARC = new MimeType("application/marc"); public static final MimeType APPLICATION_MBEDLET = new
MimeType("application/mbedlet"); public static final MimeType APPLICATION_MCAD = new MimeType("application/mcad"); public static final MimeType APPLICATION_MIME = new MimeType("application/mime"); public static final MimeType APPLICATION_MSPOWERPOINT = new MimeType("application/mspowerpoint"); public static final MimeType APPLICATION_MSWORD = new MimeType("application/msword"); public static final MimeType APPLICATION_MSWRITE = new MimeType("application/mswrite"); public static final MimeType APPLICATION_NETMC = new MimeType("application/netmc"); public static final MimeType APPLICATION_OCTET_STREAM = new MimeType("application/octet-stream"); public static final MimeType APPLICATION_ODA = new MimeType("application/oda"); public static final MimeType APPLICATION_PDF = new MimeType("application/pdf"); public static final MimeType APPLICATION_PKCS10 = new MimeType("application/pkcs10"); public static final MimeType APPLICATION_PKCS7_MIME = new MimeType("application/pkcs7-mime"); public static final MimeType APPLICATION_PKCS7_SIGNATURE = new MimeType("application/pkcs7-signature"); public static final MimeType APPLICATION_PKCS_12 = new MimeType("application/pkcs-12"); public static final MimeType APPLICATION_PKCS_CRL = new MimeType("application/pkcs-crl"); public static final MimeType APPLICATION_PKIX_CERT = new MimeType("application/pkix-cert"); public static final MimeType APPLICATION_PKIX_CRL = new MimeType("application/pkix-crl"); public static final MimeType APPLICATION_PLAIN = new MimeType("application/plain"); public static final MimeType APPLICATION_POSTSCRIPT = new MimeType("application/postscript"); public static final MimeType APPLICATION_POWERPOINT = new MimeType("application/powerpoint"); public static final MimeType APPLICATION_PRO_ENG = new MimeType("application/pro_eng"); public static final MimeType APPLICATION_RINGING_TONES = new MimeType("application/ringing-tones"); public static final MimeType APPLICATION_RTF = new MimeType("application/rtf"); public static final MimeType APPLICATION_SDP = new MimeType("application/sdp"); public static final MimeType APPLICATION_SEA = new MimeType("application/sea"); public static final MimeType APPLICATION_SET = new MimeType("application/set"); public static final MimeType APPLICATION_SLA = new MimeType("application/sla"); public static final MimeType APPLICATION_SMIL = new MimeType("application/smil"); public static final MimeType APPLICATION_SOAP_XML = new MimeType("application/soap+xml"); public static final MimeType APPLICATION_SOLIDS = new MimeType("application/solids"); public static final MimeType APPLICATION_SOUNDER = new MimeType("application/sounder"); public static final MimeType APPLICATION_STEP = new MimeType("application/step"); public static final MimeType APPLICATION_STREAMINGMEDIA = new MimeType("application/streamingmedia"); public static final MimeType APPLICATION_TOOLBOOK = new MimeType("application/toolbook"); public static final MimeType APPLICATION_VDA = new MimeType("application/vda"); public static final MimeType APPLICATION_VND_FDF = new MimeType("application/vnd.fdf"); public static final MimeType APPLICATION_VND_HP_HPGL = new MimeType("application/vnd.hp-hpgl"); public static final MimeType APPLICATION_VND_HP_PCL = new MimeType("application/vnd.hp-pcl"); public static final MimeType APPLICATION_VND_MS_EXCEL = new MimeType("application/vnd.ms-excel"); public static final MimeType APPLICATION_VND_MS_PKI_CERTSTORE = new MimeType("application/vnd.ms-pki.certstore"); public static final MimeType 
APPLICATION_VND_MS_PKI_PKO = new MimeType("application/vnd.ms-pki.pko"); public static final MimeType APPLICATION_VND_MS_PKI_SECCAT = new MimeType("application/vnd.ms-pki.seccat"); public static final MimeType APPLICATION_VND_MS_PKI_STL = new MimeType("application/vnd.ms-pki.stl"); public static final MimeType APPLICATION_VND_MS_POWERPOINT = new MimeType("application/vnd.ms-powerpoint"); public static final MimeType APPLICATION_VND_MS_PROJECT = new MimeType("application/vnd.ms-project"); public static final MimeType APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE = new MimeType("application/vnd.nokia.configuration-message"); public static final MimeType APPLICATION_VND_NOKIA_RINGING_TONE = new MimeType("application/vnd.nokia.ringing-tone"); public static final MimeType APPLICATION_VND_RN_REALMEDIA = new MimeType("application/vnd.rn-realmedia"); public static final MimeType APPLICATION_VND_RN_REALPLAYER = new MimeType("application/vnd.rn-realplayer"); public static final MimeType APPLICATION_VND_WAP_WMLC = new MimeType("application/vnd.wap.wmlc"); public static final MimeType APPLICATION_VND_WAP_WMLSCRIPTC = new MimeType("application/vnd.wap.wmlscriptc"); public static final MimeType APPLICATION_VND_XARA = new MimeType("application/vnd.xara"); public static final MimeType APPLICATION_VOCALTEC_MEDIA_DESC = new MimeType("application/vocaltec-media-desc"); public static final MimeType APPLICATION_VOCALTEC_MEDIA_FILE = new MimeType("application/vocaltec-media-file"); public static final MimeType APPLICATION_WORDPERFECT = new MimeType("application/wordperfect"); public static final MimeType APPLICATION_WORDPERFECT6_0 = new MimeType("application/wordperfect6.0"); public static final MimeType APPLICATION_WORDPERFECT6_1 = new MimeType("application/wordperfect6.1"); public static final MimeType APPLICATION_XML = new MimeType("application/xml"); public static final MimeType APPLICATION_X_123 = new MimeType("application/x-123"); public static final MimeType APPLICATION_X_AIM = new MimeType("application/x-aim"); public static final MimeType APPLICATION_X_AUTHORWARE_BIN = new MimeType("application/x-authorware-bin"); public static final MimeType APPLICATION_X_AUTHORWARE_MAP = new MimeType("application/x-authorware-map"); public static final MimeType APPLICATION_X_AUTHORWARE_SEG = new MimeType("application/x-authorware-seg"); public static final MimeType APPLICATION_X_BCPIO = new MimeType("application/x-bcpio"); public static final MimeType APPLICATION_X_BINARY = new MimeType("application/x-binary"); public static final MimeType APPLICATION_X_BINHEX40 = new MimeType("application/x-binhex40"); public static final MimeType APPLICATION_X_BSH = new MimeType("application/x-bsh"); public static final MimeType APPLICATION_X_BYTECODE_ELISP = new MimeType("application/x-bytecode.elisp"); public static final MimeType APPLICATION_X_BYTECODE_PYTHON = new MimeType("application/x-bytecode.python"); public static final MimeType APPLICATION_X_BZIP = new MimeType("application/x-bzip"); public static final MimeType APPLICATION_X_BZIP2 = new MimeType("application/x-bzip2"); public static final MimeType APPLICATION_X_CDF = new MimeType("application/x-cdf"); public static final MimeType APPLICATION_X_CDLINK = new MimeType("application/x-cdlink"); public static final MimeType APPLICATION_X_CHAT = new MimeType("application/x-chat"); public static final MimeType APPLICATION_X_CMU_RASTER = new MimeType("application/x-cmu-raster"); public static final MimeType APPLICATION_X_COCOA = new MimeType("application/x-cocoa"); public static 
final MimeType APPLICATION_X_COMPACTPRO = new MimeType("application/x-compactpro"); public static final MimeType APPLICATION_X_COMPRESS = new MimeType("application/x-compress"); public static final MimeType APPLICATION_X_COMPRESSED = new MimeType("application/x-compressed"); public static final MimeType APPLICATION_X_CONFERENCE = new MimeType("application/x-conference"); public static final MimeType APPLICATION_X_CPIO = new MimeType("application/x-cpio"); public static final MimeType APPLICATION_X_CPT = new MimeType("application/x-cpt"); public static final MimeType APPLICATION_X_CSH = new MimeType("application/x-csh"); public static final MimeType APPLICATION_X_DEEPV = new MimeType("application/x-deepv"); public static final MimeType APPLICATION_X_DIRECTOR = new MimeType("application/x-director"); public static final MimeType APPLICATION_X_DVI = new MimeType("application/x-dvi"); public static final MimeType APPLICATION_X_ELC = new MimeType("application/x-elc"); public static final MimeType APPLICATION_X_ENVOY = new MimeType("application/x-envoy"); public static final MimeType APPLICATION_X_ESREHBER = new MimeType("application/x-esrehber"); public static final MimeType APPLICATION_X_EXCEL = new MimeType("application/x-excel"); public static final MimeType APPLICATION_X_FRAME = new MimeType("application/x-frame"); public static final MimeType APPLICATION_X_FREELANCE = new MimeType("application/x-freelance"); public static final MimeType APPLICATION_X_GSP = new MimeType("application/x-gsp"); public static final MimeType APPLICATION_X_GSS = new MimeType("application/x-gss"); public static final MimeType APPLICATION_X_GTAR = new MimeType("application/x-gtar"); public static final MimeType APPLICATION_X_GZIP = new MimeType("application/x-gzip"); public static final MimeType APPLICATION_X_HDF = new MimeType("application/x-hdf"); public static final MimeType APPLICATION_X_HELPFILE = new MimeType("application/x-helpfile"); public static final MimeType APPLICATION_X_HTTPD_IMAP = new MimeType("application/x-httpd-imap"); public static final MimeType APPLICATION_X_IMA = new MimeType("application/x-ima"); public static final MimeType APPLICATION_X_INTERNETT_SIGNUP = new MimeType("application/x-internett-signup"); public static final MimeType APPLICATION_X_INVENTOR = new MimeType("application/x-inventor"); public static final MimeType APPLICATION_X_IP2 = new MimeType("application/x-ip2"); public static final MimeType APPLICATION_X_JAVASCRIPT = new MimeType("application/x-javascript"); public static final MimeType APPLICATION_X_JAVA_CLASS = new MimeType("application/x-java-class"); public static final MimeType APPLICATION_X_JAVA_COMMERCE = new MimeType("application/x-java-commerce"); public static final MimeType APPLICATION_X_KOAN = new MimeType("application/x-koan"); public static final MimeType APPLICATION_X_KSH = new MimeType("application/x-ksh"); public static final MimeType APPLICATION_X_LATEX = new MimeType("application/x-latex"); public static final MimeType APPLICATION_X_LHA = new MimeType("application/x-lha"); public static final MimeType APPLICATION_X_LISP = new MimeType("application/x-lisp"); public static final MimeType APPLICATION_X_LIVESCREEN = new MimeType("application/x-livescreen"); public static final MimeType APPLICATION_X_LOTUS = new MimeType("application/x-lotus"); public static final MimeType APPLICATION_X_LOTUSSCREENCAM = new MimeType("application/x-lotusscreencam"); public static final MimeType APPLICATION_X_LZH = new MimeType("application/x-lzh"); public static final MimeType 
APPLICATION_X_LZX = new MimeType("application/x-lzx"); public static final MimeType APPLICATION_X_MACBINARY = new MimeType("application/x-macbinary"); public static final MimeType APPLICATION_X_MAC_BINHEX40 = new MimeType("application/x-mac-binhex40"); public static final MimeType APPLICATION_X_MAGIC_CAP_PACKAGE_1_0 = new MimeType("application/x-magic-cap-package-1.0"); public static final MimeType APPLICATION_X_MATHCAD = new MimeType("application/x-mathcad"); public static final MimeType APPLICATION_X_MEME = new MimeType("application/x-meme"); public static final MimeType APPLICATION_X_MIDI = new MimeType("application/x-midi"); public static final MimeType APPLICATION_X_MIF = new MimeType("application/x-mif"); public static final MimeType APPLICATION_X_MIX_TRANSFER = new MimeType("application/x-mix-transfer"); public static final MimeType APPLICATION_X_MPLAYER2 = new MimeType("application/x-mplayer2"); public static final MimeType APPLICATION_X_MSEXCEL = new MimeType("application/x-msexcel"); public static final MimeType APPLICATION_X_MSPOWERPOINT = new MimeType("application/x-mspowerpoint"); public static final MimeType APPLICATION_X_NAVIDOC = new MimeType("application/x-navidoc"); public static final MimeType APPLICATION_X_NAVIMAP = new MimeType("application/x-navimap"); public static final MimeType APPLICATION_X_NAVISTYLE = new MimeType("application/x-navistyle"); public static final MimeType APPLICATION_X_NAVI_ANIMATION = new MimeType("application/x-navi-animation"); public static final MimeType APPLICATION_X_NETCDF = new MimeType("application/x-netcdf"); public static final MimeType APPLICATION_X_NEWTON_COMPATIBLE_PKG = new MimeType("application/x-newton-compatible-pkg"); public static final MimeType APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE = new MimeType("application/x-nokia-9000-communicator-add-on-software"); public static final MimeType APPLICATION_X_OMC = new MimeType("application/x-omc"); public static final MimeType APPLICATION_X_OMCDATAMAKER = new MimeType("application/x-omcdatamaker"); public static final MimeType APPLICATION_X_OMCREGERATOR = new MimeType("application/x-omcregerator"); public static final MimeType APPLICATION_X_PAGEMAKER = new MimeType("application/x-pagemaker"); public static final MimeType APPLICATION_X_PCL = new MimeType("application/x-pcl"); public static final MimeType APPLICATION_X_PIXCLSCRIPT = new MimeType("application/x-pixclscript"); public static final MimeType APPLICATION_X_PKCS10 = new MimeType("application/x-pkcs10"); public static final MimeType APPLICATION_X_PKCS12 = new MimeType("application/x-pkcs12"); public static final MimeType APPLICATION_X_PKCS7_CERTIFICATES = new MimeType("application/x-pkcs7-certificates"); public static final MimeType APPLICATION_X_PKCS7_CERTREQRESP = new MimeType("application/x-pkcs7-certreqresp"); public static final MimeType APPLICATION_X_PKCS7_MIME = new MimeType("application/x-pkcs7-mime"); public static final MimeType APPLICATION_X_PKCS7_SIGNATURE = new MimeType("application/x-pkcs7-signature"); public static final MimeType APPLICATION_X_POINTPLUS = new MimeType("application/x-pointplus"); public static final MimeType APPLICATION_X_PORTABLE_ANYMAP = new MimeType("application/x-portable-anymap"); public static final MimeType APPLICATION_X_PROJECT = new MimeType("application/x-project"); public static final MimeType APPLICATION_X_QPRO = new MimeType("application/x-qpro"); public static final MimeType APPLICATION_X_RTF = new MimeType("application/x-rtf"); public static final MimeType 
APPLICATION_X_SDP = new MimeType("application/x-sdp"); public static final MimeType APPLICATION_X_SEA = new MimeType("application/x-sea"); public static final MimeType APPLICATION_X_SEELOGO = new MimeType("application/x-seelogo"); public static final MimeType APPLICATION_X_SH = new MimeType("application/x-sh"); public static final MimeType APPLICATION_X_SHAR = new MimeType("application/x-shar"); public static final MimeType APPLICATION_X_SHOCKWAVE_FLASH = new MimeType("application/x-shockwave-flash"); public static final MimeType APPLICATION_X_SIT = new MimeType("application/x-sit"); public static final MimeType APPLICATION_X_SPRITE = new MimeType("application/x-sprite"); public static final MimeType APPLICATION_X_STUFFIT = new MimeType("application/x-stuffit"); public static final MimeType APPLICATION_X_SV4CPIO = new MimeType("application/x-sv4cpio"); public static final MimeType APPLICATION_X_SV4CRC = new MimeType("application/x-sv4crc"); public static final MimeType APPLICATION_X_TAR = new MimeType("application/x-tar"); public static final MimeType APPLICATION_X_TBOOK = new MimeType("application/x-tbook"); public static final MimeType APPLICATION_X_TCL = new MimeType("application/x-tcl"); public static final MimeType APPLICATION_X_TEX = new MimeType("application/x-tex"); public static final MimeType APPLICATION_X_TEXINFO = new MimeType("application/x-texinfo"); public static final MimeType APPLICATION_X_TROFF = new MimeType("application/x-troff"); public static final MimeType APPLICATION_X_TROFF_MAN = new MimeType("application/x-troff-man"); public static final MimeType APPLICATION_X_TROFF_ME = new MimeType("application/x-troff-me"); public static final MimeType APPLICATION_X_TROFF_MS = new MimeType("application/x-troff-ms"); public static final MimeType APPLICATION_X_TROFF_MSVIDEO = new MimeType("application/x-troff-msvideo"); public static final MimeType APPLICATION_X_USTAR = new MimeType("application/x-ustar"); public static final MimeType APPLICATION_X_VISIO = new MimeType("application/x-visio"); public static final MimeType APPLICATION_X_VND_AUDIOEXPLOSION_MZZ = new MimeType("application/x-vnd.audioexplosion.mzz"); public static final MimeType APPLICATION_X_VND_LS_XPIX = new MimeType("application/x-vnd.ls-xpix"); public static final MimeType APPLICATION_X_VRML = new MimeType("application/x-vrml"); public static final MimeType APPLICATION_X_WAIS_SOURCE = new MimeType("application/x-wais-source"); public static final MimeType APPLICATION_X_WINHELP = new MimeType("application/x-winhelp"); public static final MimeType APPLICATION_X_WINTALK = new MimeType("application/x-wintalk"); public static final MimeType APPLICATION_X_WORLD = new MimeType("application/x-world"); public static final MimeType APPLICATION_X_WPWIN = new MimeType("application/x-wpwin"); public static final MimeType APPLICATION_X_WRI = new MimeType("application/x-wri"); public static final MimeType APPLICATION_X_X509_CA_CERT = new MimeType("application/x-x509-ca-cert"); public static final MimeType APPLICATION_X_X509_USER_CERT = new MimeType("application/x-x509-user-cert"); public static final MimeType APPLICATION_X_ZIP_COMPRESSED = new MimeType("application/x-zip-compressed"); public static final MimeType APPLICATION_YAML = new MimeType("application/yaml"); public static final MimeType APPLICATION_ZIP = new MimeType("application/zip"); public static final MimeType AUDIO_AIFF = new MimeType("audio/aiff"); public static final MimeType AUDIO_BASIC = new MimeType("audio/basic"); public static final MimeType AUDIO_IT = new 
MimeType("audio/it"); public static final MimeType AUDIO_MAKE = new MimeType("audio/make"); public static final MimeType AUDIO_MAKE_MY_FUNK = new MimeType("audio/make.my.funk"); public static final MimeType AUDIO_MID = new MimeType("audio/mid"); public static final MimeType AUDIO_MIDI = new MimeType("audio/midi"); public static final MimeType AUDIO_MOD = new MimeType("audio/mod"); public static final MimeType AUDIO_MPEG = new MimeType("audio/mpeg"); public static final MimeType AUDIO_MPEG3 = new MimeType("audio/mpeg3"); public static final MimeType AUDIO_NSPAUDIO = new MimeType("audio/nspaudio"); public static final MimeType AUDIO_S3M = new MimeType("audio/s3m"); public static final MimeType AUDIO_TSPLAYER = new MimeType("audio/tsplayer"); public static final MimeType AUDIO_TSP_AUDIO = new MimeType("audio/tsp-audio"); public static final MimeType AUDIO_VND_QCELP = new MimeType("audio/vnd.qcelp"); public static final MimeType AUDIO_VOC = new MimeType("audio/voc"); public static final MimeType AUDIO_VOXWARE = new MimeType("audio/voxware"); public static final MimeType AUDIO_WAV = new MimeType("audio/wav"); public static final MimeType AUDIO_XM = new MimeType("audio/xm"); public static final MimeType AUDIO_X_ADPCM = new MimeType("audio/x-adpcm"); public static final MimeType AUDIO_X_AIFF = new MimeType("audio/x-aiff"); public static final MimeType AUDIO_X_AU = new MimeType("audio/x-au"); public static final MimeType AUDIO_X_GSM = new MimeType("audio/x-gsm"); public static final MimeType AUDIO_X_JAM = new MimeType("audio/x-jam"); public static final MimeType AUDIO_X_LIVEAUDIO = new MimeType("audio/x-liveaudio"); public static final MimeType AUDIO_X_MID = new MimeType("audio/x-mid"); public static final MimeType AUDIO_X_MIDI = new MimeType("audio/x-midi"); public static final MimeType AUDIO_X_MOD = new MimeType("audio/x-mod"); public static final MimeType AUDIO_X_MPEG = new MimeType("audio/x-mpeg"); public static final MimeType AUDIO_X_MPEG_3 = new MimeType("audio/x-mpeg-3"); public static final MimeType AUDIO_X_MPEQURL = new MimeType("audio/x-mpequrl"); public static final MimeType AUDIO_X_NSPAUDIO = new MimeType("audio/x-nspaudio"); public static final MimeType AUDIO_X_PN_REALAUDIO = new MimeType("audio/x-pn-realaudio"); public static final MimeType AUDIO_X_PN_REALAUDIO_PLUGIN = new MimeType("audio/x-pn-realaudio-plugin"); public static final MimeType AUDIO_X_PSID = new MimeType("audio/x-psid"); public static final MimeType AUDIO_X_REALAUDIO = new MimeType("audio/x-realaudio"); public static final MimeType AUDIO_X_TWINVQ = new MimeType("audio/x-twinvq"); public static final MimeType AUDIO_X_TWINVQ_PLUGIN = new MimeType("audio/x-twinvq-plugin"); public static final MimeType AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE = new MimeType("audio/x-vnd.audioexplosion.mjuicemediafile"); public static final MimeType AUDIO_X_VOC = new MimeType("audio/x-voc"); public static final MimeType AUDIO_X_WAV = new MimeType("audio/x-wav"); public static final MimeType CHEMICAL_X_PDB = new MimeType("chemical/x-pdb"); public static final MimeType DRAWING_X_DWF = new MimeType("drawing/x-dwf"); public static final MimeType IMAGE_BMP = new MimeType("image/bmp"); public static final MimeType IMAGE_CMU_RASTER = new MimeType("image/cmu-raster"); public static final MimeType IMAGE_FIF = new MimeType("image/fif"); public static final MimeType IMAGE_FLORIAN = new MimeType("image/florian"); public static final MimeType IMAGE_G3FAX = new MimeType("image/g3fax"); public static final MimeType IMAGE_GIF = new 
MimeType("image/gif"); public static final MimeType IMAGE_IEF = new MimeType("image/ief"); public static final MimeType IMAGE_JPEG = new MimeType("image/jpeg"); public static final MimeType IMAGE_JUTVISION = new MimeType("image/jutvision"); public static final MimeType IMAGE_NAPLPS = new MimeType("image/naplps"); public static final MimeType IMAGE_PICT = new MimeType("image/pict"); public static final MimeType IMAGE_PJPEG = new MimeType("image/pjpeg"); public static final MimeType IMAGE_PNG = new MimeType("image/png"); public static final MimeType IMAGE_TIFF = new MimeType("image/tiff"); public static final MimeType IMAGE_VASA = new MimeType("image/vasa"); public static final MimeType IMAGE_VND_DWG = new MimeType("image/vnd.dwg"); public static final MimeType IMAGE_VND_FPX = new MimeType("image/vnd.fpx"); public static final MimeType IMAGE_VND_NET_FPX = new MimeType("image/vnd.net-fpx"); public static final MimeType IMAGE_VND_RN_REALFLASH = new MimeType("image/vnd.rn-realflash"); public static final MimeType IMAGE_VND_RN_REALPIX = new MimeType("image/vnd.rn-realpix"); public static final MimeType IMAGE_VND_WAP_WBMP = new MimeType("image/vnd.wap.wbmp"); public static final MimeType IMAGE_VND_XIFF = new MimeType("image/vnd.xiff"); public static final MimeType IMAGE_XBM = new MimeType("image/xbm"); public static final MimeType IMAGE_XPM = new MimeType("image/xpm"); public static final MimeType IMAGE_X_CMU_RASTER = new MimeType("image/x-cmu-raster"); public static final MimeType IMAGE_X_DWG = new MimeType("image/x-dwg"); public static final MimeType IMAGE_X_ICON = new MimeType("image/x-icon"); public static final MimeType IMAGE_X_JG = new MimeType("image/x-jg"); public static final MimeType IMAGE_X_JPS = new MimeType("image/x-jps"); public static final MimeType IMAGE_X_NIFF = new MimeType("image/x-niff"); public static final MimeType IMAGE_X_PCX = new MimeType("image/x-pcx"); public static final MimeType IMAGE_X_PICT = new MimeType("image/x-pict"); public static final MimeType IMAGE_X_PORTABLE_ANYMAP = new MimeType("image/x-portable-anymap"); public static final MimeType IMAGE_X_PORTABLE_BITMAP = new MimeType("image/x-portable-bitmap"); public static final MimeType IMAGE_X_PORTABLE_GRAYMAP = new MimeType("image/x-portable-graymap"); public static final MimeType IMAGE_X_PORTABLE_GREYMAP = new MimeType("image/x-portable-greymap"); public static final MimeType IMAGE_X_PORTABLE_PIXMAP = new MimeType("image/x-portable-pixmap"); public static final MimeType IMAGE_X_QUICKTIME = new MimeType("image/x-quicktime"); public static final MimeType IMAGE_X_RGB = new MimeType("image/x-rgb"); public static final MimeType IMAGE_X_TIFF = new MimeType("image/x-tiff"); public static final MimeType IMAGE_X_WINDOWS_BMP = new MimeType("image/x-windows-bmp"); public static final MimeType IMAGE_X_XBITMAP = new MimeType("image/x-xbitmap"); public static final MimeType IMAGE_X_XBM = new MimeType("image/x-xbm"); public static final MimeType IMAGE_X_XPIXMAP = new MimeType("image/x-xpixmap"); public static final MimeType IMAGE_X_XWD = new MimeType("image/x-xwd"); public static final MimeType IMAGE_X_XWINDOWDUMP = new MimeType("image/x-xwindowdump"); public static final MimeType I_WORLD_I_VRML = new MimeType("i-world/i-vrml"); public static final MimeType MESSAGE_RFC822 = new MimeType("message/rfc822"); public static final MimeType MODEL_IGES = new MimeType("model/iges"); public static final MimeType MODEL_VND_DWF = new MimeType("model/vnd.dwf"); public static final MimeType MODEL_VRML = new MimeType("model/vrml"); public 
static final MimeType MODEL_X_POV = new MimeType("model/x-pov"); public static final MimeType MULTIPART_X_GZIP = new MimeType("multipart/x-gzip"); public static final MimeType MULTIPART_X_USTAR = new MimeType("multipart/x-ustar"); public static final MimeType MULTIPART_X_ZIP = new MimeType("multipart/x-zip"); public static final MimeType MUSIC_CRESCENDO = new MimeType("music/crescendo"); public static final MimeType MUSIC_X_KARAOKE = new MimeType("music/x-karaoke"); public static final MimeType PALEOVU_X_PV = new MimeType("paleovu/x-pv"); public static final MimeType TEXT_ASP = new MimeType("text/asp"); public static final MimeType TEXT_CSS = new MimeType("text/css"); public static final MimeType TEXT_ECMASCRIPT = new MimeType("text/ecmascript"); public static final MimeType TEXT_HTML = new MimeType("text/html"); public static final MimeType TEXT_JAVASCRIPT = new MimeType("text/javascript"); public static final MimeType TEXT_MCF = new MimeType("text/mcf"); public static final MimeType TEXT_PASCAL = new MimeType("text/pascal"); public static final MimeType TEXT_PLAIN = new MimeType("text/plain"); public static final MimeType TEXT_RICHTEXT = new MimeType("text/richtext"); public static final MimeType TEXT_SCRIPLET = new MimeType("text/scriplet"); public static final MimeType TEXT_SGML = new MimeType("text/sgml"); public static final MimeType TEXT_TAB_SEPARATED_VALUES = new MimeType("text/tab-separated-values"); public static final MimeType TEXT_URI_LIST = new MimeType("text/uri-list"); public static final MimeType TEXT_VND_ABC = new MimeType("text/vnd.abc"); public static final MimeType TEXT_VND_FMI_FLEXSTOR = new MimeType("text/vnd.fmi.flexstor"); public static final MimeType TEXT_VND_RN_REALTEXT = new MimeType("text/vnd.rn-realtext"); public static final MimeType TEXT_VND_WAP_WML = new MimeType("text/vnd.wap.wml"); public static final MimeType TEXT_VND_WAP_WMLSCRIPT = new MimeType("text/vnd.wap.wmlscript"); public static final MimeType TEXT_WEBVIEWHTML = new MimeType("text/webviewhtml"); public static final MimeType TEXT_XML = new MimeType("text/xml"); public static final MimeType TEXT_X_ASM = new MimeType("text/x-asm"); public static final MimeType TEXT_X_AUDIOSOFT_INTRA = new MimeType("text/x-audiosoft-intra"); public static final MimeType TEXT_X_C = new MimeType("text/x-c"); public static final MimeType TEXT_X_COMPONENT = new MimeType("text/x-component"); public static final MimeType TEXT_X_FORTRAN = new MimeType("text/x-fortran"); public static final MimeType TEXT_X_H = new MimeType("text/x-h"); public static final MimeType TEXT_X_JAVA_SOURCE = new MimeType("text/x-java-source"); public static final MimeType TEXT_X_LA_ASF = new MimeType("text/x-la-asf"); public static final MimeType TEXT_X_M = new MimeType("text/x-m"); public static final MimeType TEXT_X_PASCAL = new MimeType("text/x-pascal"); public static final MimeType TEXT_X_SCRIPT = new MimeType("text/x-script"); public static final MimeType TEXT_X_SCRIPT_CSH = new MimeType("text/x-script.csh"); public static final MimeType TEXT_X_SCRIPT_ELISP = new MimeType("text/x-script.elisp"); public static final MimeType TEXT_X_SCRIPT_GUILE = new MimeType("text/x-script.guile"); public static final MimeType TEXT_X_SCRIPT_KSH = new MimeType("text/x-script.ksh"); public static final MimeType TEXT_X_SCRIPT_LISP = new MimeType("text/x-script.lisp"); public static final MimeType TEXT_X_SCRIPT_PERL = new MimeType("text/x-script.perl"); public static final MimeType TEXT_X_SCRIPT_PERL_MODULE = new MimeType("text/x-script.perl-module"); public static 
final MimeType TEXT_X_SCRIPT_PHYTON = new MimeType("text/x-script.phyton"); public static final MimeType TEXT_X_SCRIPT_REXX = new MimeType("text/x-script.rexx"); public static final MimeType TEXT_X_SCRIPT_SCHEME = new MimeType("text/x-script.scheme"); public static final MimeType TEXT_X_SCRIPT_SH = new MimeType("text/x-script.sh"); public static final MimeType TEXT_X_SCRIPT_TCL = new MimeType("text/x-script.tcl"); public static final MimeType TEXT_X_SCRIPT_TCSH = new MimeType("text/x-script.tcsh"); public static final MimeType TEXT_X_SCRIPT_ZSH = new MimeType("text/x-script.zsh"); public static final MimeType TEXT_X_SERVER_PARSED_HTML = new MimeType("text/x-server-parsed-html"); public static final MimeType TEXT_X_SETEXT = new MimeType("text/x-setext"); public static final MimeType TEXT_X_SGML = new MimeType("text/x-sgml"); public static final MimeType TEXT_X_SPEECH = new MimeType("text/x-speech"); public static final MimeType TEXT_X_UIL = new MimeType("text/x-uil"); public static final MimeType TEXT_X_UUENCODE = new MimeType("text/x-uuencode"); public static final MimeType TEXT_X_VCALENDAR = new MimeType("text/x-vcalendar"); public static final MimeType VIDEO_ANIMAFLEX = new MimeType("video/animaflex"); public static final MimeType VIDEO_AVI = new MimeType("video/avi"); public static final MimeType VIDEO_AVS_VIDEO = new MimeType("video/avs-video"); public static final MimeType VIDEO_DL = new MimeType("video/dl"); public static final MimeType VIDEO_FLI = new MimeType("video/fli"); public static final MimeType VIDEO_GL = new MimeType("video/gl"); public static final MimeType VIDEO_MPEG = new MimeType("video/mpeg"); public static final MimeType VIDEO_MSVIDEO = new MimeType("video/msvideo"); public static final MimeType VIDEO_QUICKTIME = new MimeType("video/quicktime"); public static final MimeType VIDEO_VDO = new MimeType("video/vdo"); public static final MimeType VIDEO_VIVO = new MimeType("video/vivo"); public static final MimeType VIDEO_VND_RN_REALVIDEO = new MimeType("video/vnd.rn-realvideo"); public static final MimeType VIDEO_VND_VIVO = new MimeType("video/vnd.vivo"); public static final MimeType VIDEO_VOSAIC = new MimeType("video/vosaic"); public static final MimeType VIDEO_X_AMT_DEMORUN = new MimeType("video/x-amt-demorun"); public static final MimeType VIDEO_X_AMT_SHOWRUN = new MimeType("video/x-amt-showrun"); public static final MimeType VIDEO_X_ATOMIC3D_FEATURE = new MimeType("video/x-atomic3d-feature"); public static final MimeType VIDEO_X_DL = new MimeType("video/x-dl"); public static final MimeType VIDEO_X_DV = new MimeType("video/x-dv"); public static final MimeType VIDEO_X_FLI = new MimeType("video/x-fli"); public static final MimeType VIDEO_X_GL = new MimeType("video/x-gl"); public static final MimeType VIDEO_X_ISVIDEO = new MimeType("video/x-isvideo"); public static final MimeType VIDEO_X_MOTION_JPEG = new MimeType("video/x-motion-jpeg"); public static final MimeType VIDEO_X_MPEG = new MimeType("video/x-mpeg"); public static final MimeType VIDEO_X_MPEQ2A = new MimeType("video/x-mpeq2a"); public static final MimeType VIDEO_X_MSVIDEO = new MimeType("video/x-msvideo"); public static final MimeType VIDEO_X_MS_ASF = new MimeType("video/x-ms-asf"); public static final MimeType VIDEO_X_MS_ASF_PLUGIN = new MimeType("video/x-ms-asf-plugin"); public static final MimeType VIDEO_X_QTC = new MimeType("video/x-qtc"); public static final MimeType VIDEO_X_SCM = new MimeType("video/x-scm"); public static final MimeType VIDEO_X_SGI_MOVIE = new MimeType("video/x-sgi-movie"); public static 
final MimeType WINDOWS_METAFILE = new MimeType("windows/metafile"); public static final MimeType WWW_MIME = new MimeType("www/mime"); public static final MimeType XGL_DRAWING = new MimeType("xgl/drawing"); public static final MimeType XGL_MOVIE = new MimeType("xgl/movie"); public static final MimeType X_CONFERENCE_X_COOLTALK = new MimeType("x-conference/x-cooltalk"); public static final MimeType X_MUSIC_X_MIDI = new MimeType("x-music/x-midi"); public static final MimeType X_WORLD_X_3DMF = new MimeType("x-world/x-3dmf"); public static final MimeType X_WORLD_X_SVR = new MimeType("x-world/x-svr"); public static final MimeType X_WORLD_X_VRML = new MimeType("x-world/x-vrml"); public static final MimeType X_WORLD_X_VRT = new MimeType("x-world/x-vrt"); public static final String _3DM = "3dm"; public static final String _3DMF = "3dmf"; public static final String A = "a"; public static final String AAB = "aab"; public static final String AAM = "aam"; public static final String AAS = "aas"; public static final String ABC = "abc"; public static final String ACGI = "acgi"; public static final String AFL = "afl"; public static final String AI = "ai"; public static final String AIF = "aif"; public static final String AIFC = "aifc"; public static final String AIFF = "aiff"; public static final String AIM = "aim"; public static final String AIP = "aip"; public static final String ANI = "ani"; public static final String AOS = "aos"; public static final String APS = "aps"; public static final String ARC = "arc"; public static final String ARJ = "arj"; public static final String ART = "art"; public static final String ASF = "asf"; public static final String ASM = "asm"; public static final String ASP = "asp"; public static final String ASX = "asx"; public static final String AU = "au"; public static final String AVI = "avi"; public static final String AVS = "avs"; public static final String BCPIO = "bcpio"; public static final String BIN = "bin"; public static final String BM = "bm"; public static final String BMP = "bmp"; public static final String BOO = "boo"; public static final String BOOK = "book"; public static final String BOZ = "boz"; public static final String BSON = "bson"; public static final String BSH = "bsh"; public static final String BZ = "bz"; public static final String BZ2 = "bz2"; public static final String C = "c"; public static final String C_PLUS_PLUS = "c++"; public static final String CAT = "cat"; public static final String CC = "cc"; public static final String CCAD = "ccad"; public static final String CCO = "cco"; public static final String CDF = "cdf"; public static final String CER = "cer"; public static final String CHA = "cha"; public static final String CHAT = "chat"; public static final String CLASS = "class"; public static final String COM = "com"; public static final String CONF = "conf"; public static final String CPIO = "cpio"; public static final String CPP = "cpp"; public static final String CPT = "cpt"; public static final String CRL = "crl"; public static final String CRT = "crt"; public static final String CSH = "csh"; public static final String CSS = "css"; public static final String CXX = "cxx"; public static final String DCR = "dcr"; public static final String DEEPV = "deepv"; public static final String DEF = "def"; public static final String DER = "der"; public static final String DIF = "dif"; public static final String DIR = "dir"; public static final String DL = "dl"; public static final String DOC = "doc"; public static final String DOT = "dot"; public static 
final String DP = "dp"; public static final String DRW = "drw"; public static final String DUMP = "dump"; public static final String DV = "dv"; public static final String DVI = "dvi"; public static final String DWF = "dwf"; public static final String DWG = "dwg"; public static final String DXF = "dxf"; public static final String DXR = "dxr"; public static final String EL = "el"; public static final String ELC = "elc"; public static final String ENV = "env"; public static final String EPS = "eps"; public static final String ES = "es"; public static final String ETX = "etx"; public static final String EVY = "evy"; public static final String EXE = "exe"; public static final String F = "f"; public static final String F77 = "f77"; public static final String F90 = "f90"; public static final String FDF = "fdf"; public static final String FIF = "fif"; public static final String FLI = "fli"; public static final String FLO = "flo"; public static final String FLX = "flx"; public static final String FMF = "fmf"; public static final String FOR = "for"; public static final String FPX = "fpx"; public static final String FRL = "frl"; public static final String FUNK = "funk"; public static final String G = "g"; public static final String G3 = "g3"; public static final String GIF = "gif"; public static final String GL = "gl"; public static final String GSD = "gsd"; public static final String GSM = "gsm"; public static final String GSP = "gsp"; public static final String GSS = "gss"; public static final String GTAR = "gtar"; public static final String GZ = "gz"; public static final String GZIP = "gzip"; public static final String H = "h"; public static final String HDF = "hdf"; public static final String HELP = "help"; public static final String HGL = "hgl"; public static final String HH = "hh"; public static final String HLB = "hlb"; public static final String HLP = "hlp"; public static final String HPG = "hpg"; public static final String HPGL = "hpgl"; public static final String HQX = "hqx"; public static final String HTA = "hta"; public static final String HTC = "htc"; public static final String HTM = "htm"; public static final String HTML = "html"; public static final String HTMLS = "htmls"; public static final String HTT = "htt"; public static final String HTX = "htx"; public static final String ICE = "ice"; public static final String ICO = "ico"; public static final String IDC = "idc"; public static final String IEF = "ief"; public static final String IEFS = "iefs"; public static final String IGES = "iges"; public static final String IGS = "igs"; public static final String IMA = "ima"; public static final String IMAP = "imap"; public static final String INF = "inf"; public static final String INS = "ins"; public static final String IP = "ip"; public static final String ISU = "isu"; public static final String IT = "it"; public static final String IV = "iv"; public static final String IVR = "ivr"; public static final String IVY = "ivy"; public static final String JAM = "jam"; public static final String JAV = "jav"; public static final String JAVA = "java"; public static final String JCM = "jcm"; public static final String JFIF = "jfif"; public static final String JFIF_TBNL = "jfif-tbnl"; public static final String JPE = "jpe"; public static final String JPEG = "jpeg"; public static final String JPG = "jpg"; public static final String JPS = "jps"; public static final String JS = "js"; public static final String JSON = "json"; public static final String JUT = "jut"; public static final String KAR = "kar"; 
public static final String KSH = "ksh"; public static final String LA = "la"; public static final String LAM = "lam"; public static final String LATEX = "latex"; public static final String LHA = "lha"; public static final String LHX = "lhx"; public static final String LIST = "list"; public static final String LMA = "lma"; public static final String LOG = "log"; public static final String LSP = "lsp"; public static final String LST = "lst"; public static final String LSX = "lsx"; public static final String LTX = "ltx"; public static final String LZH = "lzh"; public static final String LZX = "lzx"; public static final String M = "m"; public static final String M1V = "m1v"; public static final String M2A = "m2a"; public static final String M2V = "m2v"; public static final String M3U = "m3u"; public static final String MAN = "man"; public static final String MAP = "map"; public static final String MAR = "mar"; public static final String MBD = "mbd"; public static final String MC$ = "mc$"; public static final String MCD = "mcd"; public static final String MCF = "mcf"; public static final String MCP = "mcp"; public static final String ME = "me"; public static final String MHT = "mht"; public static final String MHTML = "mhtml"; public static final String MID = "mid"; public static final String MIDI = "midi"; public static final String MIF = "mif"; public static final String MIME = "mime"; public static final String MJF = "mjf"; public static final String MJPG = "mjpg"; public static final String MM = "mm"; public static final String MME = "mme"; public static final String MOD = "mod"; public static final String MOOV = "moov"; public static final String MOV = "mov"; public static final String MOVIE = "movie"; public static final String MP2 = "mp2"; public static final String MP3 = "mp3"; public static final String MPA = "mpa"; public static final String MPC = "mpc"; public static final String MPE = "mpe"; public static final String MPEG = "mpeg"; public static final String MPG = "mpg"; public static final String MPGA = "mpga"; public static final String MPP = "mpp"; public static final String MPT = "mpt"; public static final String MPV = "mpv"; public static final String MPX = "mpx"; public static final String MRC = "mrc"; public static final String MS = "ms"; public static final String MV = "mv"; public static final String MY = "my"; public static final String MZZ = "mzz"; public static final String NAP = "nap"; public static final String NAPLPS = "naplps"; public static final String NC = "nc"; public static final String NCM = "ncm"; public static final String NIF = "nif"; public static final String NIFF = "niff"; public static final String NIX = "nix"; public static final String NSC = "nsc"; public static final String NVD = "nvd"; public static final String O = "o"; public static final String ODA = "oda"; public static final String OMC = "omc"; public static final String OMCD = "omcd"; public static final String OMCR = "omcr"; public static final String P = "p"; public static final String P10 = "p10"; public static final String P12 = "p12"; public static final String P7A = "p7a"; public static final String P7C = "p7c"; public static final String P7M = "p7m"; public static final String P7R = "p7r"; public static final String P7S = "p7s"; public static final String PART = "part"; public static final String PAS = "pas"; public static final String PBM = "pbm"; public static final String PCL = "pcl"; public static final String PCT = "pct"; public static final String PCX = "pcx"; public static final 
String PDB = "pdb"; public static final String PDF = "pdf"; public static final String PFUNK = "pfunk"; public static final String PGM = "pgm"; public static final String PIC = "pic"; public static final String PICT = "pict"; public static final String PKG = "pkg"; public static final String PKO = "pko"; public static final String PL = "pl"; public static final String PLX = "plx"; public static final String PM = "pm"; public static final String PM4 = "pm4"; public static final String PM5 = "pm5"; public static final String PNG = "png"; public static final String PNM = "pnm"; public static final String POT = "pot"; public static final String POV = "pov"; public static final String PPA = "ppa"; public static final String PPM = "ppm"; public static final String PPS = "pps"; public static final String PPT = "ppt"; public static final String PPZ = "ppz"; public static final String PRE = "pre"; public static final String PRT = "prt"; public static final String PS = "ps"; public static final String PSD = "psd"; public static final String PVU = "pvu"; public static final String PWZ = "pwz"; public static final String PY = "py"; public static final String PYC = "pyc"; public static final String QCP = "qcp"; public static final String QD3 = "qd3"; public static final String QD3D = "qd3d"; public static final String QIF = "qif"; public static final String QT = "qt"; public static final String QTC = "qtc"; public static final String QTI = "qti"; public static final String QTIF = "qtif"; public static final String RA = "ra"; public static final String RAM = "ram"; public static final String RAS = "ras"; public static final String RAST = "rast"; public static final String REXX = "rexx"; public static final String RF = "rf"; public static final String RGB = "rgb"; public static final String RM = "rm"; public static final String RMI = "rmi"; public static final String RMM = "rmm"; public static final String RMP = "rmp"; public static final String RNG = "rng"; public static final String RNX = "rnx"; public static final String ROFF = "roff"; public static final String RP = "rp"; public static final String RPM = "rpm"; public static final String RT = "rt"; public static final String RTF = "rtf"; public static final String RTX = "rtx"; public static final String RV = "rv"; public static final String S = "s"; public static final String S3M = "s3m"; public static final String SAVEME = "saveme"; public static final String SBK = "sbk"; public static final String SCM = "scm"; public static final String SDML = "sdml"; public static final String SDP = "sdp"; public static final String SDR = "sdr"; public static final String SEA = "sea"; public static final String SET = "set"; public static final String SGM = "sgm"; public static final String SGML = "sgml"; public static final String SH = "sh"; public static final String SHAR = "shar"; public static final String SHTML = "shtml"; public static final String SID = "sid"; public static final String SIT = "sit"; public static final String SKD = "skd"; public static final String SKM = "skm"; public static final String SKP = "skp"; public static final String SKT = "skt"; public static final String SL = "sl"; public static final String SMI = "smi"; public static final String SMIL = "smil"; public static final String SND = "snd"; public static final String SOL = "sol"; public static final String SPC = "spc"; public static final String SPL = "spl"; public static final String SPR = "spr"; public static final String SPRITE = "sprite"; public static final String SRC = "src"; 
public static final String SSI = "ssi"; public static final String SSM = "ssm"; public static final String SST = "sst"; public static final String STEP = "step"; public static final String STL = "stl"; public static final String STP = "stp"; public static final String SV4CPIO = "sv4cpio"; public static final String SV4CRC = "sv4crc"; public static final String SVF = "svf"; public static final String SVR = "svr"; public static final String SWF = "swf"; public static final String T = "t"; public static final String TALK = "talk"; public static final String TAR = "tar"; public static final String TBK = "tbk"; public static final String TCL = "tcl"; public static final String TCSH = "tcsh"; public static final String TEX = "tex"; public static final String TEXI = "texi"; public static final String TEXINFO = "texinfo"; public static final String TEXT = "text"; public static final String TGZ = "tgz"; public static final String TIF = "tif"; public static final String TIFF = "tiff"; public static final String TR = "tr"; public static final String TSI = "tsi"; public static final String TSP = "tsp"; public static final String TSV = "tsv"; public static final String TURBOT = "turbot"; public static final String TXT = "txt"; public static final String UIL = "uil"; public static final String UNI = "uni"; public static final String UNIS = "unis"; public static final String UNV = "unv"; public static final String URI = "uri"; public static final String URIS = "uris"; public static final String USTAR = "ustar"; public static final String UU = "uu"; public static final String UUE = "uue"; public static final String VCD = "vcd"; public static final String VCS = "vcs"; public static final String VDA = "vda"; public static final String VDO = "vdo"; public static final String VEW = "vew"; public static final String VIV = "viv"; public static final String VIVO = "vivo"; public static final String VMD = "vmd"; public static final String VMF = "vmf"; public static final String VOC = "voc"; public static final String VOS = "vos"; public static final String VOX = "vox"; public static final String VQE = "vqe"; public static final String VQF = "vqf"; public static final String VQL = "vql"; public static final String VRML = "vrml"; public static final String VRT = "vrt"; public static final String VSD = "vsd"; public static final String VST = "vst"; public static final String VSW = "vsw"; public static final String W60 = "w60"; public static final String W61 = "w61"; public static final String W6W = "w6w"; public static final String WAV = "wav"; public static final String WB1 = "wb1"; public static final String WBMP = "wbmp"; public static final String WEB = "web"; public static final String WIZ = "wiz"; public static final String WK1 = "wk1"; public static final String WMF = "wmf"; public static final String WML = "wml"; public static final String WMLC = "wmlc"; public static final String WMLS = "wmls"; public static final String WMLSC = "wmlsc"; public static final String WORD = "word"; public static final String WP = "wp"; public static final String WP5 = "wp5"; public static final String WP6 = "wp6"; public static final String WPD = "wpd"; public static final String WQ1 = "wq1"; public static final String WRI = "wri"; public static final String WRL = "wrl"; public static final String WRZ = "wrz"; public static final String WSC = "wsc"; public static final String WSRC = "wsrc"; public static final String WTK = "wtk"; public static final String X_PNG = "x-png"; public static final String XBM = "xbm"; public static 
final String XDR = "xdr"; public static final String XGZ = "xgz"; public static final String XIF = "xif"; public static final String XL = "xl"; public static final String XLA = "xla"; public static final String XLB = "xlb"; public static final String XLC = "xlc"; public static final String XLD = "xld"; public static final String XLK = "xlk"; public static final String XLL = "xll"; public static final String XLM = "xlm"; public static final String XLS = "xls"; public static final String XLT = "xlt"; public static final String XLV = "xlv"; public static final String XLW = "xlw"; public static final String XM = "xm"; public static final String XML = "xml"; public static final String XMZ = "xmz"; public static final String XPIX = "xpix"; public static final String XPM = "xpm"; public static final String XSR = "xsr"; public static final String XWD = "xwd"; public static final String XYZ = "xyz"; public static final String Z = "z"; public static final String ZIP = "zip"; public static final String ZOO = "zoo"; public static final String ZSH = "zsh"; private static final Map<String, MimeType> types = new HashMap<>(); private static final Map<String, MimeType> typesBySuffix = new HashMap<>(); static { types.put(APPLICATION_ACAD.toString(), APPLICATION_ACAD); types.put(APPLICATION_ARJ.toString(), APPLICATION_ARJ); types.put(APPLICATION_BASE64.toString(), APPLICATION_BASE64); types.put(APPLICATION_BINHEX.toString(), APPLICATION_BINHEX); types.put(APPLICATION_BINHEX4.toString(), APPLICATION_BINHEX4); types.put(APPLICATION_BOOK.toString(), APPLICATION_BOOK); types.put(APPLICATION_BSON.toString(), APPLICATION_BSON); types.put(APPLICATION_CDF.toString(), APPLICATION_CDF); types.put(APPLICATION_CLARISCAD.toString(), APPLICATION_CLARISCAD); types.put(APPLICATION_COMMONGROUND.toString(), APPLICATION_COMMONGROUND); types.put(APPLICATION_DRAFTING.toString(), APPLICATION_DRAFTING); types.put(APPLICATION_DSPTYPE.toString(), APPLICATION_DSPTYPE); types.put(APPLICATION_DXF.toString(), APPLICATION_DXF); types.put(APPLICATION_ECMASCRIPT.toString(), APPLICATION_ECMASCRIPT); types.put(APPLICATION_ENVOY.toString(), APPLICATION_ENVOY); types.put(APPLICATION_EXCEL.toString(), APPLICATION_EXCEL); types.put(APPLICATION_FRACTALS.toString(), APPLICATION_FRACTALS); types.put(APPLICATION_FREELOADER.toString(), APPLICATION_FREELOADER); types.put(APPLICATION_FUTURESPLASH.toString(), APPLICATION_FUTURESPLASH); types.put(APPLICATION_GNUTAR.toString(), APPLICATION_GNUTAR); types.put(APPLICATION_GROUPWISE.toString(), APPLICATION_GROUPWISE); types.put(APPLICATION_HLP.toString(), APPLICATION_HLP); types.put(APPLICATION_HTA.toString(), APPLICATION_HTA); types.put(APPLICATION_IGES.toString(), APPLICATION_IGES); types.put(APPLICATION_INF.toString(), APPLICATION_INF); types.put(APPLICATION_I_DEAS.toString(), APPLICATION_I_DEAS); types.put(APPLICATION_JAVA.toString(), APPLICATION_JAVA); types.put(APPLICATION_JSON.toString(), APPLICATION_JSON); types.put(APPLICATION_JAVASCRIPT.toString(), APPLICATION_JAVASCRIPT); types.put(APPLICATION_JAVA_BYTE_CODE.toString(), APPLICATION_JAVA_BYTE_CODE); types.put(APPLICATION_LHA.toString(), APPLICATION_LHA); types.put(APPLICATION_LZX.toString(), APPLICATION_LZX); types.put(APPLICATION_MACBINARY.toString(), APPLICATION_MACBINARY); types.put(APPLICATION_MAC_BINARY.toString(), APPLICATION_MAC_BINARY); types.put(APPLICATION_MAC_BINHEX.toString(), APPLICATION_MAC_BINHEX); types.put(APPLICATION_MAC_BINHEX40.toString(), APPLICATION_MAC_BINHEX40); types.put(APPLICATION_MAC_COMPACTPRO.toString(), 
APPLICATION_MAC_COMPACTPRO); types.put(APPLICATION_MARC.toString(), APPLICATION_MARC); types.put(APPLICATION_MBEDLET.toString(), APPLICATION_MBEDLET); types.put(APPLICATION_MCAD.toString(), APPLICATION_MCAD); types.put(APPLICATION_MIME.toString(), APPLICATION_MIME); types.put(APPLICATION_MSPOWERPOINT.toString(), APPLICATION_MSPOWERPOINT); types.put(APPLICATION_MSWORD.toString(), APPLICATION_MSWORD); types.put(APPLICATION_MSWRITE.toString(), APPLICATION_MSWRITE); types.put(APPLICATION_NETMC.toString(), APPLICATION_NETMC); types.put(APPLICATION_OCTET_STREAM.toString(), APPLICATION_OCTET_STREAM); types.put(APPLICATION_ODA.toString(), APPLICATION_ODA); types.put(APPLICATION_PDF.toString(), APPLICATION_PDF); types.put(APPLICATION_PKCS10.toString(), APPLICATION_PKCS10); types.put(APPLICATION_PKCS7_MIME.toString(), APPLICATION_PKCS7_MIME); types.put(APPLICATION_PKCS7_SIGNATURE.toString(), APPLICATION_PKCS7_SIGNATURE); types.put(APPLICATION_PKCS_12.toString(), APPLICATION_PKCS_12); types.put(APPLICATION_PKCS_CRL.toString(), APPLICATION_PKCS_CRL); types.put(APPLICATION_PKIX_CERT.toString(), APPLICATION_PKIX_CERT); types.put(APPLICATION_PKIX_CRL.toString(), APPLICATION_PKIX_CRL); types.put(APPLICATION_PLAIN.toString(), APPLICATION_PLAIN); types.put(APPLICATION_POSTSCRIPT.toString(), APPLICATION_POSTSCRIPT); types.put(APPLICATION_POWERPOINT.toString(), APPLICATION_POWERPOINT); types.put(APPLICATION_PRO_ENG.toString(), APPLICATION_PRO_ENG); types.put(APPLICATION_RINGING_TONES.toString(), APPLICATION_RINGING_TONES); types.put(APPLICATION_RTF.toString(), APPLICATION_RTF); types.put(APPLICATION_SDP.toString(), APPLICATION_SDP); types.put(APPLICATION_SEA.toString(), APPLICATION_SEA); types.put(APPLICATION_SET.toString(), APPLICATION_SET); types.put(APPLICATION_SLA.toString(), APPLICATION_SLA); types.put(APPLICATION_SMIL.toString(), APPLICATION_SMIL); types.put(APPLICATION_SOLIDS.toString(), APPLICATION_SOLIDS); types.put(APPLICATION_SOUNDER.toString(), APPLICATION_SOUNDER); types.put(APPLICATION_STEP.toString(), APPLICATION_STEP); types.put(APPLICATION_STREAMINGMEDIA.toString(), APPLICATION_STREAMINGMEDIA); types.put(APPLICATION_TOOLBOOK.toString(), APPLICATION_TOOLBOOK); types.put(APPLICATION_VDA.toString(), APPLICATION_VDA); types.put(APPLICATION_VND_FDF.toString(), APPLICATION_VND_FDF); types.put(APPLICATION_VND_HP_HPGL.toString(), APPLICATION_VND_HP_HPGL); types.put(APPLICATION_VND_HP_PCL.toString(), APPLICATION_VND_HP_PCL); types.put(APPLICATION_VND_MS_EXCEL.toString(), APPLICATION_VND_MS_EXCEL); types.put(APPLICATION_VND_MS_PKI_CERTSTORE.toString(), APPLICATION_VND_MS_PKI_CERTSTORE); types.put(APPLICATION_VND_MS_PKI_PKO.toString(), APPLICATION_VND_MS_PKI_PKO); types.put(APPLICATION_VND_MS_PKI_SECCAT.toString(), APPLICATION_VND_MS_PKI_SECCAT); types.put(APPLICATION_VND_MS_PKI_STL.toString(), APPLICATION_VND_MS_PKI_STL); types.put(APPLICATION_VND_MS_POWERPOINT.toString(), APPLICATION_VND_MS_POWERPOINT); types.put(APPLICATION_VND_MS_PROJECT.toString(), APPLICATION_VND_MS_PROJECT); types.put(APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE.toString(), APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE); types.put(APPLICATION_VND_NOKIA_RINGING_TONE.toString(), APPLICATION_VND_NOKIA_RINGING_TONE); types.put(APPLICATION_VND_RN_REALMEDIA.toString(), APPLICATION_VND_RN_REALMEDIA); types.put(APPLICATION_VND_RN_REALPLAYER.toString(), APPLICATION_VND_RN_REALPLAYER); types.put(APPLICATION_VND_WAP_WMLC.toString(), APPLICATION_VND_WAP_WMLC); types.put(APPLICATION_VND_WAP_WMLSCRIPTC.toString(), APPLICATION_VND_WAP_WMLSCRIPTC); 
types.put(APPLICATION_VND_XARA.toString(), APPLICATION_VND_XARA); types.put(APPLICATION_VOCALTEC_MEDIA_DESC.toString(), APPLICATION_VOCALTEC_MEDIA_DESC); types.put(APPLICATION_VOCALTEC_MEDIA_FILE.toString(), APPLICATION_VOCALTEC_MEDIA_FILE); types.put(APPLICATION_WORDPERFECT.toString(), APPLICATION_WORDPERFECT); types.put(APPLICATION_WORDPERFECT6_0.toString(), APPLICATION_WORDPERFECT6_0); types.put(APPLICATION_WORDPERFECT6_1.toString(), APPLICATION_WORDPERFECT6_1); types.put(APPLICATION_XML.toString(), APPLICATION_XML); types.put(APPLICATION_X_123.toString(), APPLICATION_X_123); types.put(APPLICATION_X_AIM.toString(), APPLICATION_X_AIM); types.put(APPLICATION_X_AUTHORWARE_BIN.toString(), APPLICATION_X_AUTHORWARE_BIN); types.put(APPLICATION_X_AUTHORWARE_MAP.toString(), APPLICATION_X_AUTHORWARE_MAP); types.put(APPLICATION_X_AUTHORWARE_SEG.toString(), APPLICATION_X_AUTHORWARE_SEG); types.put(APPLICATION_X_BCPIO.toString(), APPLICATION_X_BCPIO); types.put(APPLICATION_X_BINARY.toString(), APPLICATION_X_BINARY); types.put(APPLICATION_X_BINHEX40.toString(), APPLICATION_X_BINHEX40); types.put(APPLICATION_X_BSH.toString(), APPLICATION_X_BSH); types.put(APPLICATION_X_BYTECODE_ELISP.toString(), APPLICATION_X_BYTECODE_ELISP); types.put(APPLICATION_X_BYTECODE_PYTHON.toString(), APPLICATION_X_BYTECODE_PYTHON); types.put(APPLICATION_X_BZIP.toString(), APPLICATION_X_BZIP); types.put(APPLICATION_X_BZIP2.toString(), APPLICATION_X_BZIP2); types.put(APPLICATION_X_CDF.toString(), APPLICATION_X_CDF); types.put(APPLICATION_X_CDLINK.toString(), APPLICATION_X_CDLINK); types.put(APPLICATION_X_CHAT.toString(), APPLICATION_X_CHAT); types.put(APPLICATION_X_CMU_RASTER.toString(), APPLICATION_X_CMU_RASTER); types.put(APPLICATION_X_COCOA.toString(), APPLICATION_X_COCOA); types.put(APPLICATION_X_COMPACTPRO.toString(), APPLICATION_X_COMPACTPRO); types.put(APPLICATION_X_COMPRESS.toString(), APPLICATION_X_COMPRESS); types.put(APPLICATION_X_COMPRESSED.toString(), APPLICATION_X_COMPRESSED); types.put(APPLICATION_X_CONFERENCE.toString(), APPLICATION_X_CONFERENCE); types.put(APPLICATION_X_CPIO.toString(), APPLICATION_X_CPIO); types.put(APPLICATION_X_CPT.toString(), APPLICATION_X_CPT); types.put(APPLICATION_X_CSH.toString(), APPLICATION_X_CSH); types.put(APPLICATION_X_DEEPV.toString(), APPLICATION_X_DEEPV); types.put(APPLICATION_X_DIRECTOR.toString(), APPLICATION_X_DIRECTOR); types.put(APPLICATION_X_DVI.toString(), APPLICATION_X_DVI); types.put(APPLICATION_X_ELC.toString(), APPLICATION_X_ELC); types.put(APPLICATION_X_ENVOY.toString(), APPLICATION_X_ENVOY); types.put(APPLICATION_X_ESREHBER.toString(), APPLICATION_X_ESREHBER); types.put(APPLICATION_X_EXCEL.toString(), APPLICATION_X_EXCEL); types.put(APPLICATION_X_FRAME.toString(), APPLICATION_X_FRAME); types.put(APPLICATION_X_FREELANCE.toString(), APPLICATION_X_FREELANCE); types.put(APPLICATION_X_GSP.toString(), APPLICATION_X_GSP); types.put(APPLICATION_X_GSS.toString(), APPLICATION_X_GSS); types.put(APPLICATION_X_GTAR.toString(), APPLICATION_X_GTAR); types.put(APPLICATION_X_GZIP.toString(), APPLICATION_X_GZIP); types.put(APPLICATION_X_HDF.toString(), APPLICATION_X_HDF); types.put(APPLICATION_X_HELPFILE.toString(), APPLICATION_X_HELPFILE); types.put(APPLICATION_X_HTTPD_IMAP.toString(), APPLICATION_X_HTTPD_IMAP); types.put(APPLICATION_X_IMA.toString(), APPLICATION_X_IMA); types.put(APPLICATION_X_INTERNETT_SIGNUP.toString(), APPLICATION_X_INTERNETT_SIGNUP); types.put(APPLICATION_X_INVENTOR.toString(), APPLICATION_X_INVENTOR); types.put(APPLICATION_X_IP2.toString(), 
APPLICATION_X_IP2); types.put(APPLICATION_X_JAVASCRIPT.toString(), APPLICATION_X_JAVASCRIPT); types.put(APPLICATION_X_JAVA_CLASS.toString(), APPLICATION_X_JAVA_CLASS); types.put(APPLICATION_X_JAVA_COMMERCE.toString(), APPLICATION_X_JAVA_COMMERCE); types.put(APPLICATION_X_KOAN.toString(), APPLICATION_X_KOAN); types.put(APPLICATION_X_KSH.toString(), APPLICATION_X_KSH); types.put(APPLICATION_X_LATEX.toString(), APPLICATION_X_LATEX); types.put(APPLICATION_X_LHA.toString(), APPLICATION_X_LHA); types.put(APPLICATION_X_LISP.toString(), APPLICATION_X_LISP); types.put(APPLICATION_X_LIVESCREEN.toString(), APPLICATION_X_LIVESCREEN); types.put(APPLICATION_X_LOTUS.toString(), APPLICATION_X_LOTUS); types.put(APPLICATION_X_LOTUSSCREENCAM.toString(), APPLICATION_X_LOTUSSCREENCAM); types.put(APPLICATION_X_LZH.toString(), APPLICATION_X_LZH); types.put(APPLICATION_X_LZX.toString(), APPLICATION_X_LZX); types.put(APPLICATION_X_MACBINARY.toString(), APPLICATION_X_MACBINARY); types.put(APPLICATION_X_MAC_BINHEX40.toString(), APPLICATION_X_MAC_BINHEX40); types.put(APPLICATION_X_MAGIC_CAP_PACKAGE_1_0.toString(), APPLICATION_X_MAGIC_CAP_PACKAGE_1_0); types.put(APPLICATION_X_MATHCAD.toString(), APPLICATION_X_MATHCAD); types.put(APPLICATION_X_MEME.toString(), APPLICATION_X_MEME); types.put(APPLICATION_X_MIDI.toString(), APPLICATION_X_MIDI); types.put(APPLICATION_X_MIF.toString(), APPLICATION_X_MIF); types.put(APPLICATION_X_MIX_TRANSFER.toString(), APPLICATION_X_MIX_TRANSFER); types.put(APPLICATION_X_MPLAYER2.toString(), APPLICATION_X_MPLAYER2); types.put(APPLICATION_X_MSEXCEL.toString(), APPLICATION_X_MSEXCEL); types.put(APPLICATION_X_MSPOWERPOINT.toString(), APPLICATION_X_MSPOWERPOINT); types.put(APPLICATION_X_NAVIDOC.toString(), APPLICATION_X_NAVIDOC); types.put(APPLICATION_X_NAVIMAP.toString(), APPLICATION_X_NAVIMAP); types.put(APPLICATION_X_NAVISTYLE.toString(), APPLICATION_X_NAVISTYLE); types.put(APPLICATION_X_NAVI_ANIMATION.toString(), APPLICATION_X_NAVI_ANIMATION); types.put(APPLICATION_X_NETCDF.toString(), APPLICATION_X_NETCDF); types.put(APPLICATION_X_NEWTON_COMPATIBLE_PKG.toString(), APPLICATION_X_NEWTON_COMPATIBLE_PKG); types.put(APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE.toString(), APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE); types.put(APPLICATION_X_OMC.toString(), APPLICATION_X_OMC); types.put(APPLICATION_X_OMCDATAMAKER.toString(), APPLICATION_X_OMCDATAMAKER); types.put(APPLICATION_X_OMCREGERATOR.toString(), APPLICATION_X_OMCREGERATOR); types.put(APPLICATION_X_PAGEMAKER.toString(), APPLICATION_X_PAGEMAKER); types.put(APPLICATION_X_PCL.toString(), APPLICATION_X_PCL); types.put(APPLICATION_X_PIXCLSCRIPT.toString(), APPLICATION_X_PIXCLSCRIPT); types.put(APPLICATION_X_PKCS10.toString(), APPLICATION_X_PKCS10); types.put(APPLICATION_X_PKCS12.toString(), APPLICATION_X_PKCS12); types.put(APPLICATION_X_PKCS7_CERTIFICATES.toString(), APPLICATION_X_PKCS7_CERTIFICATES); types.put(APPLICATION_X_PKCS7_CERTREQRESP.toString(), APPLICATION_X_PKCS7_CERTREQRESP); types.put(APPLICATION_X_PKCS7_MIME.toString(), APPLICATION_X_PKCS7_MIME); types.put(APPLICATION_X_PKCS7_SIGNATURE.toString(), APPLICATION_X_PKCS7_SIGNATURE); types.put(APPLICATION_X_POINTPLUS.toString(), APPLICATION_X_POINTPLUS); types.put(APPLICATION_X_PORTABLE_ANYMAP.toString(), APPLICATION_X_PORTABLE_ANYMAP); types.put(APPLICATION_X_PROJECT.toString(), APPLICATION_X_PROJECT); types.put(APPLICATION_X_QPRO.toString(), APPLICATION_X_QPRO); types.put(APPLICATION_X_RTF.toString(), APPLICATION_X_RTF); types.put(APPLICATION_X_SDP.toString(), 
APPLICATION_X_SDP); types.put(APPLICATION_X_SEA.toString(), APPLICATION_X_SEA); types.put(APPLICATION_X_SEELOGO.toString(), APPLICATION_X_SEELOGO); types.put(APPLICATION_X_SH.toString(), APPLICATION_X_SH); types.put(APPLICATION_X_SHAR.toString(), APPLICATION_X_SHAR); types.put(APPLICATION_X_SHOCKWAVE_FLASH.toString(), APPLICATION_X_SHOCKWAVE_FLASH); types.put(APPLICATION_X_SIT.toString(), APPLICATION_X_SIT); types.put(APPLICATION_X_SPRITE.toString(), APPLICATION_X_SPRITE); types.put(APPLICATION_X_STUFFIT.toString(), APPLICATION_X_STUFFIT); types.put(APPLICATION_X_SV4CPIO.toString(), APPLICATION_X_SV4CPIO); types.put(APPLICATION_X_SV4CRC.toString(), APPLICATION_X_SV4CRC); types.put(APPLICATION_X_TAR.toString(), APPLICATION_X_TAR); types.put(APPLICATION_X_TBOOK.toString(), APPLICATION_X_TBOOK); types.put(APPLICATION_X_TCL.toString(), APPLICATION_X_TCL); types.put(APPLICATION_X_TEX.toString(), APPLICATION_X_TEX); types.put(APPLICATION_X_TEXINFO.toString(), APPLICATION_X_TEXINFO); types.put(APPLICATION_X_TROFF.toString(), APPLICATION_X_TROFF); types.put(APPLICATION_X_TROFF_MAN.toString(), APPLICATION_X_TROFF_MAN); types.put(APPLICATION_X_TROFF_ME.toString(), APPLICATION_X_TROFF_ME); types.put(APPLICATION_X_TROFF_MS.toString(), APPLICATION_X_TROFF_MS); types.put(APPLICATION_X_TROFF_MSVIDEO.toString(), APPLICATION_X_TROFF_MSVIDEO); types.put(APPLICATION_X_USTAR.toString(), APPLICATION_X_USTAR); types.put(APPLICATION_X_VISIO.toString(), APPLICATION_X_VISIO); types.put(APPLICATION_X_VND_AUDIOEXPLOSION_MZZ.toString(), APPLICATION_X_VND_AUDIOEXPLOSION_MZZ); types.put(APPLICATION_X_VND_LS_XPIX.toString(), APPLICATION_X_VND_LS_XPIX); types.put(APPLICATION_X_VRML.toString(), APPLICATION_X_VRML); types.put(APPLICATION_X_WAIS_SOURCE.toString(), APPLICATION_X_WAIS_SOURCE); types.put(APPLICATION_X_WINHELP.toString(), APPLICATION_X_WINHELP); types.put(APPLICATION_X_WINTALK.toString(), APPLICATION_X_WINTALK); types.put(APPLICATION_X_WORLD.toString(), APPLICATION_X_WORLD); types.put(APPLICATION_X_WPWIN.toString(), APPLICATION_X_WPWIN); types.put(APPLICATION_X_WRI.toString(), APPLICATION_X_WRI); types.put(APPLICATION_X_X509_CA_CERT.toString(), APPLICATION_X_X509_CA_CERT); types.put(APPLICATION_X_X509_USER_CERT.toString(), APPLICATION_X_X509_USER_CERT); types.put(APPLICATION_X_ZIP_COMPRESSED.toString(), APPLICATION_X_ZIP_COMPRESSED); types.put(APPLICATION_ZIP.toString(), APPLICATION_ZIP); types.put(AUDIO_AIFF.toString(), AUDIO_AIFF); types.put(AUDIO_BASIC.toString(), AUDIO_BASIC); types.put(AUDIO_IT.toString(), AUDIO_IT); types.put(AUDIO_MAKE.toString(), AUDIO_MAKE); types.put(AUDIO_MAKE_MY_FUNK.toString(), AUDIO_MAKE_MY_FUNK); types.put(AUDIO_MID.toString(), AUDIO_MID); types.put(AUDIO_MIDI.toString(), AUDIO_MIDI); types.put(AUDIO_MOD.toString(), AUDIO_MOD); types.put(AUDIO_MPEG.toString(), AUDIO_MPEG); types.put(AUDIO_MPEG3.toString(), AUDIO_MPEG3); types.put(AUDIO_NSPAUDIO.toString(), AUDIO_NSPAUDIO); types.put(AUDIO_S3M.toString(), AUDIO_S3M); types.put(AUDIO_TSPLAYER.toString(), AUDIO_TSPLAYER); types.put(AUDIO_TSP_AUDIO.toString(), AUDIO_TSP_AUDIO); types.put(AUDIO_VND_QCELP.toString(), AUDIO_VND_QCELP); types.put(AUDIO_VOC.toString(), AUDIO_VOC); types.put(AUDIO_VOXWARE.toString(), AUDIO_VOXWARE); types.put(AUDIO_WAV.toString(), AUDIO_WAV); types.put(AUDIO_XM.toString(), AUDIO_XM); types.put(AUDIO_X_ADPCM.toString(), AUDIO_X_ADPCM); types.put(AUDIO_X_AIFF.toString(), AUDIO_X_AIFF); types.put(AUDIO_X_AU.toString(), AUDIO_X_AU); types.put(AUDIO_X_GSM.toString(), AUDIO_X_GSM); 
types.put(AUDIO_X_JAM.toString(), AUDIO_X_JAM); types.put(AUDIO_X_LIVEAUDIO.toString(), AUDIO_X_LIVEAUDIO); types.put(AUDIO_X_MID.toString(), AUDIO_X_MID); types.put(AUDIO_X_MIDI.toString(), AUDIO_X_MIDI); types.put(AUDIO_X_MOD.toString(), AUDIO_X_MOD); types.put(AUDIO_X_MPEG.toString(), AUDIO_X_MPEG); types.put(AUDIO_X_MPEG_3.toString(), AUDIO_X_MPEG_3); types.put(AUDIO_X_MPEQURL.toString(), AUDIO_X_MPEQURL); types.put(AUDIO_X_NSPAUDIO.toString(), AUDIO_X_NSPAUDIO); types.put(AUDIO_X_PN_REALAUDIO.toString(), AUDIO_X_PN_REALAUDIO); types.put(AUDIO_X_PN_REALAUDIO_PLUGIN.toString(), AUDIO_X_PN_REALAUDIO_PLUGIN); types.put(AUDIO_X_PSID.toString(), AUDIO_X_PSID); types.put(AUDIO_X_REALAUDIO.toString(), AUDIO_X_REALAUDIO); types.put(AUDIO_X_TWINVQ.toString(), AUDIO_X_TWINVQ); types.put(AUDIO_X_TWINVQ_PLUGIN.toString(), AUDIO_X_TWINVQ_PLUGIN); types.put(AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE.toString(), AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE); types.put(AUDIO_X_VOC.toString(), AUDIO_X_VOC); types.put(AUDIO_X_WAV.toString(), AUDIO_X_WAV); types.put(CHEMICAL_X_PDB.toString(), CHEMICAL_X_PDB); types.put(DRAWING_X_DWF.toString(), DRAWING_X_DWF); types.put(IMAGE_BMP.toString(), IMAGE_BMP); types.put(IMAGE_CMU_RASTER.toString(), IMAGE_CMU_RASTER); types.put(IMAGE_FIF.toString(), IMAGE_FIF); types.put(IMAGE_FLORIAN.toString(), IMAGE_FLORIAN); types.put(IMAGE_G3FAX.toString(), IMAGE_G3FAX); types.put(IMAGE_GIF.toString(), IMAGE_GIF); types.put(IMAGE_IEF.toString(), IMAGE_IEF); types.put(IMAGE_JPEG.toString(), IMAGE_JPEG); types.put(IMAGE_JUTVISION.toString(), IMAGE_JUTVISION); types.put(IMAGE_NAPLPS.toString(), IMAGE_NAPLPS); types.put(IMAGE_PICT.toString(), IMAGE_PICT); types.put(IMAGE_PJPEG.toString(), IMAGE_PJPEG); types.put(IMAGE_PNG.toString(), IMAGE_PNG); types.put(IMAGE_TIFF.toString(), IMAGE_TIFF); types.put(IMAGE_VASA.toString(), IMAGE_VASA); types.put(IMAGE_VND_DWG.toString(), IMAGE_VND_DWG); types.put(IMAGE_VND_FPX.toString(), IMAGE_VND_FPX); types.put(IMAGE_VND_NET_FPX.toString(), IMAGE_VND_NET_FPX); types.put(IMAGE_VND_RN_REALFLASH.toString(), IMAGE_VND_RN_REALFLASH); types.put(IMAGE_VND_RN_REALPIX.toString(), IMAGE_VND_RN_REALPIX); types.put(IMAGE_VND_WAP_WBMP.toString(), IMAGE_VND_WAP_WBMP); types.put(IMAGE_VND_XIFF.toString(), IMAGE_VND_XIFF); types.put(IMAGE_XBM.toString(), IMAGE_XBM); types.put(IMAGE_XPM.toString(), IMAGE_XPM); types.put(IMAGE_X_CMU_RASTER.toString(), IMAGE_X_CMU_RASTER); types.put(IMAGE_X_DWG.toString(), IMAGE_X_DWG); types.put(IMAGE_X_ICON.toString(), IMAGE_X_ICON); types.put(IMAGE_X_JG.toString(), IMAGE_X_JG); types.put(IMAGE_X_JPS.toString(), IMAGE_X_JPS); types.put(IMAGE_X_NIFF.toString(), IMAGE_X_NIFF); types.put(IMAGE_X_PCX.toString(), IMAGE_X_PCX); types.put(IMAGE_X_PICT.toString(), IMAGE_X_PICT); types.put(IMAGE_X_PORTABLE_ANYMAP.toString(), IMAGE_X_PORTABLE_ANYMAP); types.put(IMAGE_X_PORTABLE_BITMAP.toString(), IMAGE_X_PORTABLE_BITMAP); types.put(IMAGE_X_PORTABLE_GRAYMAP.toString(), IMAGE_X_PORTABLE_GRAYMAP); types.put(IMAGE_X_PORTABLE_GREYMAP.toString(), IMAGE_X_PORTABLE_GREYMAP); types.put(IMAGE_X_PORTABLE_PIXMAP.toString(), IMAGE_X_PORTABLE_PIXMAP); types.put(IMAGE_X_QUICKTIME.toString(), IMAGE_X_QUICKTIME); types.put(IMAGE_X_RGB.toString(), IMAGE_X_RGB); types.put(IMAGE_X_TIFF.toString(), IMAGE_X_TIFF); types.put(IMAGE_X_WINDOWS_BMP.toString(), IMAGE_X_WINDOWS_BMP); types.put(IMAGE_X_XBITMAP.toString(), IMAGE_X_XBITMAP); types.put(IMAGE_X_XBM.toString(), IMAGE_X_XBM); types.put(IMAGE_X_XPIXMAP.toString(), IMAGE_X_XPIXMAP); 
types.put(IMAGE_X_XWD.toString(), IMAGE_X_XWD); types.put(IMAGE_X_XWINDOWDUMP.toString(), IMAGE_X_XWINDOWDUMP); types.put(I_WORLD_I_VRML.toString(), I_WORLD_I_VRML); types.put(MESSAGE_RFC822.toString(), MESSAGE_RFC822); types.put(MODEL_IGES.toString(), MODEL_IGES); types.put(MODEL_VND_DWF.toString(), MODEL_VND_DWF); types.put(MODEL_VRML.toString(), MODEL_VRML); types.put(MODEL_X_POV.toString(), MODEL_X_POV); types.put(MULTIPART_X_GZIP.toString(), MULTIPART_X_GZIP); types.put(MULTIPART_X_USTAR.toString(), MULTIPART_X_USTAR); types.put(MULTIPART_X_ZIP.toString(), MULTIPART_X_ZIP); types.put(MUSIC_CRESCENDO.toString(), MUSIC_CRESCENDO); types.put(MUSIC_X_KARAOKE.toString(), MUSIC_X_KARAOKE); types.put(PALEOVU_X_PV.toString(), PALEOVU_X_PV); types.put(TEXT_ASP.toString(), TEXT_ASP); types.put(TEXT_CSS.toString(), TEXT_CSS); types.put(TEXT_ECMASCRIPT.toString(), TEXT_ECMASCRIPT); types.put(TEXT_HTML.toString(), TEXT_HTML); types.put(TEXT_JAVASCRIPT.toString(), TEXT_JAVASCRIPT); types.put(TEXT_MCF.toString(), TEXT_MCF); types.put(TEXT_PASCAL.toString(), TEXT_PASCAL); types.put(TEXT_PLAIN.toString(), TEXT_PLAIN); types.put(TEXT_RICHTEXT.toString(), TEXT_RICHTEXT); types.put(TEXT_SCRIPLET.toString(), TEXT_SCRIPLET); types.put(TEXT_SGML.toString(), TEXT_SGML); types.put(TEXT_TAB_SEPARATED_VALUES.toString(), TEXT_TAB_SEPARATED_VALUES); types.put(TEXT_URI_LIST.toString(), TEXT_URI_LIST); types.put(TEXT_VND_ABC.toString(), TEXT_VND_ABC); types.put(TEXT_VND_FMI_FLEXSTOR.toString(), TEXT_VND_FMI_FLEXSTOR); types.put(TEXT_VND_RN_REALTEXT.toString(), TEXT_VND_RN_REALTEXT); types.put(TEXT_VND_WAP_WML.toString(), TEXT_VND_WAP_WML); types.put(TEXT_VND_WAP_WMLSCRIPT.toString(), TEXT_VND_WAP_WMLSCRIPT); types.put(TEXT_WEBVIEWHTML.toString(), TEXT_WEBVIEWHTML); types.put(TEXT_XML.toString(), TEXT_XML); types.put(TEXT_X_ASM.toString(), TEXT_X_ASM); types.put(TEXT_X_AUDIOSOFT_INTRA.toString(), TEXT_X_AUDIOSOFT_INTRA); types.put(TEXT_X_C.toString(), TEXT_X_C); types.put(TEXT_X_COMPONENT.toString(), TEXT_X_COMPONENT); types.put(TEXT_X_FORTRAN.toString(), TEXT_X_FORTRAN); types.put(TEXT_X_H.toString(), TEXT_X_H); types.put(TEXT_X_JAVA_SOURCE.toString(), TEXT_X_JAVA_SOURCE); types.put(TEXT_X_LA_ASF.toString(), TEXT_X_LA_ASF); types.put(TEXT_X_M.toString(), TEXT_X_M); types.put(TEXT_X_PASCAL.toString(), TEXT_X_PASCAL); types.put(TEXT_X_SCRIPT.toString(), TEXT_X_SCRIPT); types.put(TEXT_X_SCRIPT_CSH.toString(), TEXT_X_SCRIPT_CSH); types.put(TEXT_X_SCRIPT_ELISP.toString(), TEXT_X_SCRIPT_ELISP); types.put(TEXT_X_SCRIPT_GUILE.toString(), TEXT_X_SCRIPT_GUILE); types.put(TEXT_X_SCRIPT_KSH.toString(), TEXT_X_SCRIPT_KSH); types.put(TEXT_X_SCRIPT_LISP.toString(), TEXT_X_SCRIPT_LISP); types.put(TEXT_X_SCRIPT_PERL.toString(), TEXT_X_SCRIPT_PERL); types.put(TEXT_X_SCRIPT_PERL_MODULE.toString(), TEXT_X_SCRIPT_PERL_MODULE); types.put(TEXT_X_SCRIPT_PHYTON.toString(), TEXT_X_SCRIPT_PHYTON); types.put(TEXT_X_SCRIPT_REXX.toString(), TEXT_X_SCRIPT_REXX); types.put(TEXT_X_SCRIPT_SCHEME.toString(), TEXT_X_SCRIPT_SCHEME); types.put(TEXT_X_SCRIPT_SH.toString(), TEXT_X_SCRIPT_SH); types.put(TEXT_X_SCRIPT_TCL.toString(), TEXT_X_SCRIPT_TCL); types.put(TEXT_X_SCRIPT_TCSH.toString(), TEXT_X_SCRIPT_TCSH); types.put(TEXT_X_SCRIPT_ZSH.toString(), TEXT_X_SCRIPT_ZSH); types.put(TEXT_X_SERVER_PARSED_HTML.toString(), TEXT_X_SERVER_PARSED_HTML); types.put(TEXT_X_SETEXT.toString(), TEXT_X_SETEXT); types.put(TEXT_X_SGML.toString(), TEXT_X_SGML); types.put(TEXT_X_SPEECH.toString(), TEXT_X_SPEECH); types.put(TEXT_X_UIL.toString(), TEXT_X_UIL); 
types.put(TEXT_X_UUENCODE.toString(), TEXT_X_UUENCODE); types.put(TEXT_X_VCALENDAR.toString(), TEXT_X_VCALENDAR); types.put(VIDEO_ANIMAFLEX.toString(), VIDEO_ANIMAFLEX); types.put(VIDEO_AVI.toString(), VIDEO_AVI); types.put(VIDEO_AVS_VIDEO.toString(), VIDEO_AVS_VIDEO); types.put(VIDEO_DL.toString(), VIDEO_DL); types.put(VIDEO_FLI.toString(), VIDEO_FLI); types.put(VIDEO_GL.toString(), VIDEO_GL); types.put(VIDEO_MPEG.toString(), VIDEO_MPEG); types.put(VIDEO_MSVIDEO.toString(), VIDEO_MSVIDEO); types.put(VIDEO_QUICKTIME.toString(), VIDEO_QUICKTIME); types.put(VIDEO_VDO.toString(), VIDEO_VDO); types.put(VIDEO_VIVO.toString(), VIDEO_VIVO); types.put(VIDEO_VND_RN_REALVIDEO.toString(), VIDEO_VND_RN_REALVIDEO); types.put(VIDEO_VND_VIVO.toString(), VIDEO_VND_VIVO); types.put(VIDEO_VOSAIC.toString(), VIDEO_VOSAIC); types.put(VIDEO_X_AMT_DEMORUN.toString(), VIDEO_X_AMT_DEMORUN); types.put(VIDEO_X_AMT_SHOWRUN.toString(), VIDEO_X_AMT_SHOWRUN); types.put(VIDEO_X_ATOMIC3D_FEATURE.toString(), VIDEO_X_ATOMIC3D_FEATURE); types.put(VIDEO_X_DL.toString(), VIDEO_X_DL); types.put(VIDEO_X_DV.toString(), VIDEO_X_DV); types.put(VIDEO_X_FLI.toString(), VIDEO_X_FLI); types.put(VIDEO_X_GL.toString(), VIDEO_X_GL); types.put(VIDEO_X_ISVIDEO.toString(), VIDEO_X_ISVIDEO); types.put(VIDEO_X_MOTION_JPEG.toString(), VIDEO_X_MOTION_JPEG); types.put(VIDEO_X_MPEG.toString(), VIDEO_X_MPEG); types.put(VIDEO_X_MPEQ2A.toString(), VIDEO_X_MPEQ2A); types.put(VIDEO_X_MSVIDEO.toString(), VIDEO_X_MSVIDEO); types.put(VIDEO_X_MS_ASF.toString(), VIDEO_X_MS_ASF); types.put(VIDEO_X_MS_ASF_PLUGIN.toString(), VIDEO_X_MS_ASF_PLUGIN); types.put(VIDEO_X_QTC.toString(), VIDEO_X_QTC); types.put(VIDEO_X_SCM.toString(), VIDEO_X_SCM); types.put(VIDEO_X_SGI_MOVIE.toString(), VIDEO_X_SGI_MOVIE); types.put(WINDOWS_METAFILE.toString(), WINDOWS_METAFILE); types.put(WWW_MIME.toString(), WWW_MIME); types.put(XGL_DRAWING.toString(), XGL_DRAWING); types.put(XGL_MOVIE.toString(), XGL_MOVIE); types.put(X_CONFERENCE_X_COOLTALK.toString(), X_CONFERENCE_X_COOLTALK); types.put(X_MUSIC_X_MIDI.toString(), X_MUSIC_X_MIDI); types.put(X_WORLD_X_3DMF.toString(), X_WORLD_X_3DMF); types.put(X_WORLD_X_SVR.toString(), X_WORLD_X_SVR); types.put(X_WORLD_X_VRML.toString(), X_WORLD_X_VRML); types.put(X_WORLD_X_VRT.toString(), X_WORLD_X_VRT); typesBySuffix.put(_3DM, X_WORLD_X_3DMF); typesBySuffix.put(A, APPLICATION_OCTET_STREAM); typesBySuffix.put(AAB, APPLICATION_X_AUTHORWARE_BIN); typesBySuffix.put(AAM, APPLICATION_X_AUTHORWARE_MAP); typesBySuffix.put(AAS, APPLICATION_X_AUTHORWARE_SEG); typesBySuffix.put(ABC, TEXT_VND_ABC); typesBySuffix.put(ACGI, TEXT_HTML); typesBySuffix.put(AFL, VIDEO_ANIMAFLEX); typesBySuffix.put(AI, APPLICATION_POSTSCRIPT); typesBySuffix.put(AIF, AUDIO_AIFF); typesBySuffix.put(AIM, APPLICATION_X_AIM); typesBySuffix.put(AIP, TEXT_X_AUDIOSOFT_INTRA); typesBySuffix.put(ANI, APPLICATION_X_NAVI_ANIMATION); typesBySuffix.put(AOS, APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE); typesBySuffix.put(APS, APPLICATION_MIME); typesBySuffix.put(ARJ, APPLICATION_ARJ); typesBySuffix.put(ART, IMAGE_X_JG); typesBySuffix.put(ASF, VIDEO_X_MS_ASF); typesBySuffix.put(ASM, TEXT_X_ASM); typesBySuffix.put(ASP, TEXT_ASP); typesBySuffix.put(ASX, APPLICATION_X_MPLAYER2); typesBySuffix.put(AU, AUDIO_BASIC); typesBySuffix.put(AVI, APPLICATION_X_TROFF_MSVIDEO); typesBySuffix.put(AVS, VIDEO_AVS_VIDEO); typesBySuffix.put(BCPIO, APPLICATION_X_BCPIO); typesBySuffix.put(BIN, APPLICATION_MAC_BINARY); typesBySuffix.put(BM, IMAGE_BMP); typesBySuffix.put(BOO, APPLICATION_BOOK); 
typesBySuffix.put(BSON, APPLICATION_BSON); typesBySuffix.put(BOZ, APPLICATION_X_BZIP2); typesBySuffix.put(BSH, APPLICATION_X_BSH); typesBySuffix.put(BZ, APPLICATION_X_BZIP); typesBySuffix.put(C, TEXT_PLAIN); typesBySuffix.put(CAT, APPLICATION_VND_MS_PKI_SECCAT); typesBySuffix.put(CCAD, APPLICATION_CLARISCAD); typesBySuffix.put(CCO, APPLICATION_X_COCOA); typesBySuffix.put(CDF, APPLICATION_CDF); typesBySuffix.put(CER, APPLICATION_PKIX_CERT); typesBySuffix.put(CHA, APPLICATION_X_CHAT); typesBySuffix.put(CLASS, APPLICATION_JAVA); typesBySuffix.put(CPIO, APPLICATION_X_CPIO); typesBySuffix.put(CPT, APPLICATION_MAC_COMPACTPRO); typesBySuffix.put(CRL, APPLICATION_PKCS_CRL); typesBySuffix.put(CSH, APPLICATION_X_CSH); typesBySuffix.put(CSS, TEXT_CSS); typesBySuffix.put(DCR, APPLICATION_X_DIRECTOR); typesBySuffix.put(DEEPV, APPLICATION_X_DEEPV); typesBySuffix.put(DIF, VIDEO_X_DV); typesBySuffix.put(DL, VIDEO_DL); typesBySuffix.put(DOC, APPLICATION_MSWORD); typesBySuffix.put(DP, APPLICATION_COMMONGROUND); typesBySuffix.put(DRW, APPLICATION_DRAFTING); typesBySuffix.put(DVI, APPLICATION_X_DVI); typesBySuffix.put(DWF, DRAWING_X_DWF); typesBySuffix.put(DWG, APPLICATION_ACAD); typesBySuffix.put(DXF, APPLICATION_DXF); typesBySuffix.put(EL, TEXT_X_SCRIPT_ELISP); typesBySuffix.put(ELC, APPLICATION_X_BYTECODE_ELISP); typesBySuffix.put(ENV, APPLICATION_X_ENVOY); typesBySuffix.put(ES, APPLICATION_X_ESREHBER); typesBySuffix.put(ETX, TEXT_X_SETEXT); typesBySuffix.put(EVY, APPLICATION_ENVOY); typesBySuffix.put(FDF, APPLICATION_VND_FDF); typesBySuffix.put(FIF, APPLICATION_FRACTALS); typesBySuffix.put(FLI, VIDEO_FLI); typesBySuffix.put(FLO, IMAGE_FLORIAN); typesBySuffix.put(FLX, TEXT_VND_FMI_FLEXSTOR); typesBySuffix.put(FMF, VIDEO_X_ATOMIC3D_FEATURE); typesBySuffix.put(FPX, IMAGE_VND_FPX); typesBySuffix.put(FRL, APPLICATION_FREELOADER); typesBySuffix.put(FUNK, AUDIO_MAKE); typesBySuffix.put(G3, IMAGE_G3FAX); typesBySuffix.put(GIF, IMAGE_GIF); typesBySuffix.put(GL, VIDEO_GL); typesBySuffix.put(GSD, AUDIO_X_GSM); typesBySuffix.put(GSP, APPLICATION_X_GSP); typesBySuffix.put(GSS, APPLICATION_X_GSS); typesBySuffix.put(GTAR, APPLICATION_X_GTAR); typesBySuffix.put(GZ, APPLICATION_X_COMPRESSED); typesBySuffix.put(HDF, APPLICATION_X_HDF); typesBySuffix.put(HELP, APPLICATION_X_HELPFILE); typesBySuffix.put(HGL, APPLICATION_VND_HP_HPGL); typesBySuffix.put(HLB, TEXT_X_SCRIPT); typesBySuffix.put(HLP, APPLICATION_HLP); typesBySuffix.put(HQX, APPLICATION_BINHEX); typesBySuffix.put(HTA, APPLICATION_HTA); typesBySuffix.put(HTC, TEXT_X_COMPONENT); typesBySuffix.put(HTT, TEXT_WEBVIEWHTML); typesBySuffix.put(ICE, X_CONFERENCE_X_COOLTALK); typesBySuffix.put(ICO, IMAGE_X_ICON); typesBySuffix.put(IEF, IMAGE_IEF); typesBySuffix.put(IGES, APPLICATION_IGES); typesBySuffix.put(IMA, APPLICATION_X_IMA); typesBySuffix.put(IMAP, APPLICATION_X_HTTPD_IMAP); typesBySuffix.put(INF, APPLICATION_INF); typesBySuffix.put(INS, APPLICATION_X_INTERNETT_SIGNUP); typesBySuffix.put(IP, APPLICATION_X_IP2); typesBySuffix.put(ISU, VIDEO_X_ISVIDEO); typesBySuffix.put(IT, AUDIO_IT); typesBySuffix.put(IV, APPLICATION_X_INVENTOR); typesBySuffix.put(IVR, I_WORLD_I_VRML); typesBySuffix.put(IVY, APPLICATION_X_LIVESCREEN); typesBySuffix.put(JAVA, APPLICATION_JAVA); typesBySuffix.put(JAM, AUDIO_X_JAM); typesBySuffix.put(JCM, APPLICATION_X_JAVA_COMMERCE); typesBySuffix.put(JFIF, IMAGE_JPEG); typesBySuffix.put(JPS, IMAGE_X_JPS); typesBySuffix.put(JPEG, IMAGE_JPEG); typesBySuffix.put(JS, APPLICATION_X_JAVASCRIPT); typesBySuffix.put(JUT, IMAGE_JUTVISION); 
typesBySuffix.put(JSON, APPLICATION_JSON); typesBySuffix.put(KAR, AUDIO_MIDI); typesBySuffix.put(KSH, APPLICATION_X_KSH); typesBySuffix.put(LA, AUDIO_NSPAUDIO); typesBySuffix.put(LAM, AUDIO_X_LIVEAUDIO); typesBySuffix.put(LATEX, APPLICATION_X_LATEX); typesBySuffix.put(LHA, APPLICATION_LHA); typesBySuffix.put(LSP, APPLICATION_X_LISP); typesBySuffix.put(LSX, TEXT_X_LA_ASF); typesBySuffix.put(LZX, APPLICATION_LZX); typesBySuffix.put(M1V, VIDEO_MPEG); typesBySuffix.put(M2A, AUDIO_MPEG); typesBySuffix.put(M3U, AUDIO_X_MPEQURL); typesBySuffix.put(MAN, APPLICATION_X_TROFF_MAN); typesBySuffix.put(MAP, APPLICATION_X_NAVIMAP); typesBySuffix.put(MBD, APPLICATION_MBEDLET); typesBySuffix.put(MC$, APPLICATION_X_MAGIC_CAP_PACKAGE_1_0); typesBySuffix.put(MCD, APPLICATION_MCAD); typesBySuffix.put(MCF, IMAGE_VASA); typesBySuffix.put(MCP, APPLICATION_NETMC); typesBySuffix.put(ME, APPLICATION_X_TROFF_ME); typesBySuffix.put(MHT, MESSAGE_RFC822); typesBySuffix.put(MID, APPLICATION_X_MIDI); typesBySuffix.put(MIF, APPLICATION_X_FRAME); typesBySuffix.put(MJF, AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE); typesBySuffix.put(MJPG, VIDEO_X_MOTION_JPEG); typesBySuffix.put(MM, APPLICATION_BASE64); typesBySuffix.put(MOD, AUDIO_MOD); typesBySuffix.put(MOOV, VIDEO_QUICKTIME); typesBySuffix.put(MOVIE, VIDEO_X_SGI_MOVIE); typesBySuffix.put(MP3, AUDIO_MPEG3); typesBySuffix.put(MPC, APPLICATION_X_PROJECT); typesBySuffix.put(MPP, APPLICATION_VND_MS_PROJECT); typesBySuffix.put(MRC, APPLICATION_MARC); typesBySuffix.put(MS, APPLICATION_X_TROFF_MS); typesBySuffix.put(MZZ, APPLICATION_X_VND_AUDIOEXPLOSION_MZZ); typesBySuffix.put(NAP, IMAGE_NAPLPS); typesBySuffix.put(NCM, APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE); typesBySuffix.put(NIF, IMAGE_X_NIFF); typesBySuffix.put(NIX, APPLICATION_X_MIX_TRANSFER); typesBySuffix.put(NSC, APPLICATION_X_CONFERENCE); typesBySuffix.put(NVD, APPLICATION_X_NAVIDOC); typesBySuffix.put(ODA, APPLICATION_ODA); typesBySuffix.put(OMC, APPLICATION_X_OMC); typesBySuffix.put(OMCD, APPLICATION_X_OMCDATAMAKER); typesBySuffix.put(OMCR, APPLICATION_X_OMCREGERATOR); typesBySuffix.put(P, TEXT_X_PASCAL); typesBySuffix.put(P10, APPLICATION_PKCS10); typesBySuffix.put(P12, APPLICATION_PKCS_12); typesBySuffix.put(P7A, APPLICATION_X_PKCS7_SIGNATURE); typesBySuffix.put(P7C, APPLICATION_PKCS7_MIME); typesBySuffix.put(P7R, APPLICATION_X_PKCS7_CERTREQRESP); typesBySuffix.put(P7S, APPLICATION_PKCS7_SIGNATURE); typesBySuffix.put(PART, APPLICATION_PRO_ENG); typesBySuffix.put(PAS, TEXT_PASCAL); typesBySuffix.put(PBM, IMAGE_X_PORTABLE_BITMAP); typesBySuffix.put(PCL, APPLICATION_VND_HP_PCL); typesBySuffix.put(PCT, IMAGE_X_PICT); typesBySuffix.put(PCX, IMAGE_X_PCX); typesBySuffix.put(PDB, CHEMICAL_X_PDB); typesBySuffix.put(PDF, APPLICATION_PDF); typesBySuffix.put(PGM, IMAGE_X_PORTABLE_GRAYMAP); typesBySuffix.put(PIC, IMAGE_PICT); typesBySuffix.put(PKG, APPLICATION_X_NEWTON_COMPATIBLE_PKG); typesBySuffix.put(PKO, APPLICATION_VND_MS_PKI_PKO); typesBySuffix.put(PLX, APPLICATION_X_PIXCLSCRIPT); typesBySuffix.put(PM, IMAGE_X_XPIXMAP); typesBySuffix.put(PM4, APPLICATION_X_PAGEMAKER); typesBySuffix.put(PNG, IMAGE_PNG); typesBySuffix.put(PNM, APPLICATION_X_PORTABLE_ANYMAP); typesBySuffix.put(POT, APPLICATION_MSPOWERPOINT); typesBySuffix.put(POV, MODEL_X_POV); typesBySuffix.put(PPM, IMAGE_X_PORTABLE_PIXMAP); typesBySuffix.put(PRE, APPLICATION_X_FREELANCE); typesBySuffix.put(PVU, PALEOVU_X_PV); typesBySuffix.put(PY, TEXT_X_SCRIPT_PHYTON); typesBySuffix.put(PYC, APPLICATION_X_BYTECODE_PYTHON); typesBySuffix.put(QCP, AUDIO_VND_QCELP); 
typesBySuffix.put(QIF, IMAGE_X_QUICKTIME); typesBySuffix.put(QTC, VIDEO_X_QTC); typesBySuffix.put(RA, AUDIO_X_PN_REALAUDIO); typesBySuffix.put(RAS, APPLICATION_X_CMU_RASTER); typesBySuffix.put(REXX, TEXT_X_SCRIPT_REXX); typesBySuffix.put(RF, IMAGE_VND_RN_REALFLASH); typesBySuffix.put(RGB, IMAGE_X_RGB); typesBySuffix.put(RM, APPLICATION_VND_RN_REALMEDIA); typesBySuffix.put(RMI, AUDIO_MID); typesBySuffix.put(RNG, APPLICATION_RINGING_TONES); typesBySuffix.put(RNX, APPLICATION_VND_RN_REALPLAYER); typesBySuffix.put(ROFF, APPLICATION_X_TROFF); typesBySuffix.put(RP, IMAGE_VND_RN_REALPIX); typesBySuffix.put(RT, TEXT_RICHTEXT); typesBySuffix.put(RTF, APPLICATION_RTF); typesBySuffix.put(RV, VIDEO_VND_RN_REALVIDEO); typesBySuffix.put(S3M, AUDIO_S3M); typesBySuffix.put(SBK, APPLICATION_X_TBOOK); typesBySuffix.put(SCM, APPLICATION_X_LOTUSSCREENCAM); typesBySuffix.put(SDP, APPLICATION_SDP); typesBySuffix.put(SDR, APPLICATION_SOUNDER); typesBySuffix.put(SEA, APPLICATION_SEA); typesBySuffix.put(SET, APPLICATION_SET); typesBySuffix.put(SGM, TEXT_SGML); typesBySuffix.put(SID, AUDIO_X_PSID); typesBySuffix.put(SIT, APPLICATION_X_SIT); typesBySuffix.put(SKD, APPLICATION_X_KOAN); typesBySuffix.put(SL, APPLICATION_X_SEELOGO); typesBySuffix.put(SMI, APPLICATION_SMIL); typesBySuffix.put(SOL, APPLICATION_SOLIDS); typesBySuffix.put(SPC, APPLICATION_X_PKCS7_CERTIFICATES); typesBySuffix.put(SPL, APPLICATION_FUTURESPLASH); typesBySuffix.put(SPR, APPLICATION_X_SPRITE); typesBySuffix.put(SRC, APPLICATION_X_WAIS_SOURCE); typesBySuffix.put(SSM, APPLICATION_STREAMINGMEDIA); typesBySuffix.put(SST, APPLICATION_VND_MS_PKI_CERTSTORE); typesBySuffix.put(STEP, APPLICATION_STEP); typesBySuffix.put(STL, APPLICATION_SLA); typesBySuffix.put(SV4CPIO, APPLICATION_X_SV4CPIO); typesBySuffix.put(SV4CRC, APPLICATION_X_SV4CRC); typesBySuffix.put(SVR, APPLICATION_X_WORLD); typesBySuffix.put(SWF, APPLICATION_X_SHOCKWAVE_FLASH); typesBySuffix.put(TAR, APPLICATION_X_TAR); typesBySuffix.put(TBK, APPLICATION_TOOLBOOK); typesBySuffix.put(TCL, APPLICATION_X_TCL); typesBySuffix.put(TCSH, TEXT_X_SCRIPT_TCSH); typesBySuffix.put(TEX, APPLICATION_X_TEX); typesBySuffix.put(TEXI, APPLICATION_X_TEXINFO); typesBySuffix.put(TEXT, APPLICATION_PLAIN); typesBySuffix.put(TGZ, APPLICATION_GNUTAR); typesBySuffix.put(TIF, IMAGE_TIFF); typesBySuffix.put(TSI, AUDIO_TSP_AUDIO); typesBySuffix.put(TSP, APPLICATION_DSPTYPE); typesBySuffix.put(TSV, TEXT_TAB_SEPARATED_VALUES); typesBySuffix.put(UIL, TEXT_X_UIL); typesBySuffix.put(UNI, TEXT_URI_LIST); typesBySuffix.put(UNV, APPLICATION_I_DEAS); typesBySuffix.put(USTAR, APPLICATION_X_USTAR); typesBySuffix.put(VCD, APPLICATION_X_CDLINK); typesBySuffix.put(VCS, TEXT_X_VCALENDAR); typesBySuffix.put(VDA, APPLICATION_VDA); typesBySuffix.put(VDO, VIDEO_VDO); typesBySuffix.put(VEW, APPLICATION_GROUPWISE); typesBySuffix.put(VIV, VIDEO_VIVO); typesBySuffix.put(VMD, APPLICATION_VOCALTEC_MEDIA_DESC); typesBySuffix.put(VMF, APPLICATION_VOCALTEC_MEDIA_FILE); typesBySuffix.put(VOC, AUDIO_VOC); typesBySuffix.put(VOS, VIDEO_VOSAIC); typesBySuffix.put(VOX, AUDIO_VOXWARE); typesBySuffix.put(VQE, AUDIO_X_TWINVQ_PLUGIN); typesBySuffix.put(VQF, AUDIO_X_TWINVQ); typesBySuffix.put(VRML, APPLICATION_X_VRML); typesBySuffix.put(VRT, X_WORLD_X_VRT); typesBySuffix.put(VSD, APPLICATION_X_VISIO); typesBySuffix.put(W60, APPLICATION_WORDPERFECT6_0); typesBySuffix.put(W61, APPLICATION_WORDPERFECT6_1); typesBySuffix.put(WAV, AUDIO_WAV); typesBySuffix.put(WB1, APPLICATION_X_QPRO); typesBySuffix.put(WBMP, IMAGE_VND_WAP_WBMP); typesBySuffix.put(WEB, 
APPLICATION_VND_XARA); typesBySuffix.put(WK1, APPLICATION_X_123); typesBySuffix.put(WMF, WINDOWS_METAFILE); typesBySuffix.put(WML, TEXT_VND_WAP_WML); typesBySuffix.put(WMLC, APPLICATION_VND_WAP_WMLC); typesBySuffix.put(WMLS, TEXT_VND_WAP_WMLSCRIPT); typesBySuffix.put(WMLSC, APPLICATION_VND_WAP_WMLSCRIPTC); typesBySuffix.put(WP, APPLICATION_WORDPERFECT); typesBySuffix.put(WQ1, APPLICATION_X_LOTUS); typesBySuffix.put(WRI, APPLICATION_MSWRITE); typesBySuffix.put(WSC, TEXT_SCRIPLET); typesBySuffix.put(WTK, APPLICATION_X_WINTALK); typesBySuffix.put(XBM, IMAGE_X_XBITMAP); typesBySuffix.put(XDR, VIDEO_X_AMT_DEMORUN); typesBySuffix.put(XGZ, XGL_DRAWING); typesBySuffix.put(XIF, IMAGE_VND_XIFF); typesBySuffix.put(XL, APPLICATION_EXCEL); typesBySuffix.put(XM, AUDIO_XM); typesBySuffix.put(XML, APPLICATION_XML); typesBySuffix.put(XMZ, XGL_MOVIE); typesBySuffix.put(XPIX, APPLICATION_X_VND_LS_XPIX); typesBySuffix.put(XSR, VIDEO_X_AMT_SHOWRUN); typesBySuffix.put(XWD, IMAGE_X_XWD); typesBySuffix.put(Z, APPLICATION_X_COMPRESS); typesBySuffix.put(ZSH, TEXT_X_SCRIPT_ZSH); }

    private final String text;

    private MimeType(String text) {
        this.text = text;
    }

    /**
     * Creates and registers a custom mime type, indexing it by its text and by
     * each of the given file suffixes.
     */
    protected MimeType(String text, String... suffixes) {
        this(text);
        types.put(text, this);
        for(String suffix : suffixes) {
            typesBySuffix.put(suffix, this);
        }
    }

    @Override
    public final String toString() {
        return text;
    }

    /**
     * Returns the type indexed by the suffix.
     * @param suffix Suffix to find the type.
     * @return Mime type found, or null if there is no match.
     */
    public static MimeType fromSuffix(String suffix) {
        return typesBySuffix.get(suffix);
    }

    /**
     * Returns the type indexed by the suffix, ignoring case.
     * @param suffix Suffix to find the type.
     * @return Mime type found, or null if there is no match.
     */
    public static MimeType fromSuffixIgnoreCase(String suffix) {
        MimeType result = null;
        for(String suffixKey : typesBySuffix.keySet()) {
            if(suffixKey.equalsIgnoreCase(suffix)) {
                result = typesBySuffix.get(suffixKey);
                break;
            }
        }
        return result;
    }

    /**
     * Returns the type indexed by the text.
     * @param text Text to find the type.
     * @return Mime type found, or null if there is no match.
     */
    public static MimeType fromString(String text) {
        return types.get(text);
    }

    /**
     * Returns the type indexed by the text, ignoring case.
     * @param text Text to find the type.
     * @return Mime type found, or null if there is no match.
     */
    public static MimeType fromStringIgnoreCase(String text) {
        MimeType result = null;
        for(String textKey : types.keySet()) {
            if(textKey.equalsIgnoreCase(text)) {
                result = types.get(textKey);
                break;
            }
        }
        return result;
    }
}
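For reference, a minimal usage sketch of the lookup and registration API defined above. MimeTypeDemo and CustomMimeType are hypothetical caller-side names introduced only for illustration; the static lookup methods and the protected registering constructor are the class's own API.

import org.hcjf.encoding.MimeType;

public class MimeTypeDemo {

    // The protected constructor registers each new instance in both static
    // indexes, so a subclass can contribute a custom type at load time.
    // (CustomMimeType is an assumed example, not part of the library.)
    static class CustomMimeType extends MimeType {
        CustomMimeType() {
            super("application/x-custom", "cst");
        }
    }

    public static void main(String[] args) {
        // Exact lookups are direct HashMap gets on the two indexes.
        MimeType byText = MimeType.fromString("application/json");     // application/json
        MimeType bySuffix = MimeType.fromSuffix("json");                // application/json
        // The *IgnoreCase variants scan all keys linearly, so they are O(n).
        MimeType relaxed = MimeType.fromStringIgnoreCase("APPLICATION/XML");
        MimeType missing = MimeType.fromSuffix("nope");                 // null
        new CustomMimeType();                                           // registers itself
        MimeType custom = MimeType.fromSuffix("cst");                   // application/x-custom
        System.out.println(byText + " " + bySuffix + " " + relaxed
                + " " + missing + " " + custom);
    }
}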
src/main/java/org/hcjf/encoding/MimeType.java
package org.hcjf.encoding; import java.util.HashMap; import java.util.Map; /** * Contains common mime types. * @author javaito */ public class MimeType { public static final MimeType APPLICATION_ACAD = new MimeType("application/acad"); public static final MimeType APPLICATION_ARJ = new MimeType("application/arj"); public static final MimeType APPLICATION_BASE64 = new MimeType("application/base64"); public static final MimeType APPLICATION_BINHEX = new MimeType("application/binhex"); public static final MimeType APPLICATION_BINHEX4 = new MimeType("application/binhex4"); public static final MimeType APPLICATION_BOOK = new MimeType("application/book"); public static final MimeType APPLICATION_CDF = new MimeType("application/cdf"); public static final MimeType APPLICATION_CLARISCAD = new MimeType("application/clariscad"); public static final MimeType APPLICATION_COMMONGROUND = new MimeType("application/commonground"); public static final MimeType APPLICATION_DRAFTING = new MimeType("application/drafting"); public static final MimeType APPLICATION_DSPTYPE = new MimeType("application/dsptype"); public static final MimeType APPLICATION_DXF = new MimeType("application/dxf"); public static final MimeType APPLICATION_ECMASCRIPT = new MimeType("application/ecmascript"); public static final MimeType APPLICATION_ENVOY = new MimeType("application/envoy"); public static final MimeType APPLICATION_EXCEL = new MimeType("application/excel"); public static final MimeType APPLICATION_FRACTALS = new MimeType("application/fractals"); public static final MimeType APPLICATION_FREELOADER = new MimeType("application/freeloader"); public static final MimeType APPLICATION_FUTURESPLASH = new MimeType("application/futuresplash"); public static final MimeType APPLICATION_GNUTAR = new MimeType("application/gnutar"); public static final MimeType APPLICATION_GROUPWISE = new MimeType("application/groupwise"); public static final MimeType APPLICATION_HLP = new MimeType("application/hlp"); public static final MimeType APPLICATION_HTA = new MimeType("application/hta"); public static final MimeType APPLICATION_IGES = new MimeType("application/iges"); public static final MimeType APPLICATION_INF = new MimeType("application/inf"); public static final MimeType APPLICATION_I_DEAS = new MimeType("application/i-deas"); public static final MimeType APPLICATION_JAVA = new MimeType("application/java"); public static final MimeType APPLICATION_JAVASCRIPT = new MimeType("application/javascript"); public static final MimeType APPLICATION_JAVA_BYTE_CODE = new MimeType("application/java-byte-code"); public static final MimeType APPLICATION_JSON = new MimeType("application/json"); public static final MimeType APPLICATION_LHA = new MimeType("application/lha"); public static final MimeType APPLICATION_LZX = new MimeType("application/lzx"); public static final MimeType APPLICATION_MACBINARY = new MimeType("application/macbinary"); public static final MimeType APPLICATION_MAC_BINARY = new MimeType("application/mac-binary"); public static final MimeType APPLICATION_MAC_BINHEX = new MimeType("application/mac-binhex"); public static final MimeType APPLICATION_MAC_BINHEX40 = new MimeType("application/mac-binhex40"); public static final MimeType APPLICATION_MAC_COMPACTPRO = new MimeType("application/mac-compactpro"); public static final MimeType APPLICATION_MARC = new MimeType("application/marc"); public static final MimeType APPLICATION_MBEDLET = new MimeType("application/mbedlet"); public static final MimeType APPLICATION_MCAD = new
MimeType("application/mcad"); public static final MimeType APPLICATION_MIME = new MimeType("application/mime"); public static final MimeType APPLICATION_MSPOWERPOINT = new MimeType("application/mspowerpoint"); public static final MimeType APPLICATION_MSWORD = new MimeType("application/msword"); public static final MimeType APPLICATION_MSWRITE = new MimeType("application/mswrite"); public static final MimeType APPLICATION_NETMC = new MimeType("application/netmc"); public static final MimeType APPLICATION_OCTET_STREAM = new MimeType("application/octet-stream"); public static final MimeType APPLICATION_ODA = new MimeType("application/oda"); public static final MimeType APPLICATION_PDF = new MimeType("application/pdf"); public static final MimeType APPLICATION_PKCS10 = new MimeType("application/pkcs10"); public static final MimeType APPLICATION_PKCS7_MIME = new MimeType("application/pkcs7-mime"); public static final MimeType APPLICATION_PKCS7_SIGNATURE = new MimeType("application/pkcs7-signature"); public static final MimeType APPLICATION_PKCS_12 = new MimeType("application/pkcs-12"); public static final MimeType APPLICATION_PKCS_CRL = new MimeType("application/pkcs-crl"); public static final MimeType APPLICATION_PKIX_CERT = new MimeType("application/pkix-cert"); public static final MimeType APPLICATION_PKIX_CRL = new MimeType("application/pkix-crl"); public static final MimeType APPLICATION_PLAIN = new MimeType("application/plain"); public static final MimeType APPLICATION_POSTSCRIPT = new MimeType("application/postscript"); public static final MimeType APPLICATION_POWERPOINT = new MimeType("application/powerpoint"); public static final MimeType APPLICATION_PRO_ENG = new MimeType("application/pro_eng"); public static final MimeType APPLICATION_RINGING_TONES = new MimeType("application/ringing-tones"); public static final MimeType APPLICATION_RTF = new MimeType("application/rtf"); public static final MimeType APPLICATION_SDP = new MimeType("application/sdp"); public static final MimeType APPLICATION_SEA = new MimeType("application/sea"); public static final MimeType APPLICATION_SET = new MimeType("application/set"); public static final MimeType APPLICATION_SLA = new MimeType("application/sla"); public static final MimeType APPLICATION_SMIL = new MimeType("application/smil"); public static final MimeType APPLICATION_SOAP_XML = new MimeType("application/soap+xml"); public static final MimeType APPLICATION_SOLIDS = new MimeType("application/solids"); public static final MimeType APPLICATION_SOUNDER = new MimeType("application/sounder"); public static final MimeType APPLICATION_STEP = new MimeType("application/step"); public static final MimeType APPLICATION_STREAMINGMEDIA = new MimeType("application/streamingmedia"); public static final MimeType APPLICATION_TOOLBOOK = new MimeType("application/toolbook"); public static final MimeType APPLICATION_VDA = new MimeType("application/vda"); public static final MimeType APPLICATION_VND_FDF = new MimeType("application/vnd.fdf"); public static final MimeType APPLICATION_VND_HP_HPGL = new MimeType("application/vnd.hp-hpgl"); public static final MimeType APPLICATION_VND_HP_PCL = new MimeType("application/vnd.hp-pcl"); public static final MimeType APPLICATION_VND_MS_EXCEL = new MimeType("application/vnd.ms-excel"); public static final MimeType APPLICATION_VND_MS_PKI_CERTSTORE = new MimeType("application/vnd.ms-pki.certstore"); public static final MimeType APPLICATION_VND_MS_PKI_PKO = new MimeType("application/vnd.ms-pki.pko"); public static final MimeType 
    public static final MimeType APPLICATION_VND_MS_PKI_SECCAT = new MimeType("application/vnd.ms-pki.seccat");
    public static final MimeType APPLICATION_VND_MS_PKI_STL = new MimeType("application/vnd.ms-pki.stl");
    public static final MimeType APPLICATION_VND_MS_POWERPOINT = new MimeType("application/vnd.ms-powerpoint");
    public static final MimeType APPLICATION_VND_MS_PROJECT = new MimeType("application/vnd.ms-project");
    public static final MimeType APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE = new MimeType("application/vnd.nokia.configuration-message");
    public static final MimeType APPLICATION_VND_NOKIA_RINGING_TONE = new MimeType("application/vnd.nokia.ringing-tone");
    public static final MimeType APPLICATION_VND_RN_REALMEDIA = new MimeType("application/vnd.rn-realmedia");
    public static final MimeType APPLICATION_VND_RN_REALPLAYER = new MimeType("application/vnd.rn-realplayer");
    public static final MimeType APPLICATION_VND_WAP_WMLC = new MimeType("application/vnd.wap.wmlc");
    public static final MimeType APPLICATION_VND_WAP_WMLSCRIPTC = new MimeType("application/vnd.wap.wmlscriptc");
    public static final MimeType APPLICATION_VND_XARA = new MimeType("application/vnd.xara");
    public static final MimeType APPLICATION_VOCALTEC_MEDIA_DESC = new MimeType("application/vocaltec-media-desc");
    public static final MimeType APPLICATION_VOCALTEC_MEDIA_FILE = new MimeType("application/vocaltec-media-file");
    public static final MimeType APPLICATION_WORDPERFECT = new MimeType("application/wordperfect");
    public static final MimeType APPLICATION_WORDPERFECT6_0 = new MimeType("application/wordperfect6.0");
    public static final MimeType APPLICATION_WORDPERFECT6_1 = new MimeType("application/wordperfect6.1");
    public static final MimeType APPLICATION_XML = new MimeType("application/xml");
    public static final MimeType APPLICATION_X_123 = new MimeType("application/x-123");
    public static final MimeType APPLICATION_X_AIM = new MimeType("application/x-aim");
    public static final MimeType APPLICATION_X_AUTHORWARE_BIN = new MimeType("application/x-authorware-bin");
    public static final MimeType APPLICATION_X_AUTHORWARE_MAP = new MimeType("application/x-authorware-map");
    public static final MimeType APPLICATION_X_AUTHORWARE_SEG = new MimeType("application/x-authorware-seg");
    public static final MimeType APPLICATION_X_BCPIO = new MimeType("application/x-bcpio");
    public static final MimeType APPLICATION_X_BINARY = new MimeType("application/x-binary");
    public static final MimeType APPLICATION_X_BINHEX40 = new MimeType("application/x-binhex40");
    public static final MimeType APPLICATION_X_BSH = new MimeType("application/x-bsh");
    public static final MimeType APPLICATION_X_BYTECODE_ELISP = new MimeType("application/x-bytecode.elisp");
    public static final MimeType APPLICATION_X_BYTECODE_PYTHON = new MimeType("application/x-bytecode.python");
    public static final MimeType APPLICATION_X_BZIP = new MimeType("application/x-bzip");
    public static final MimeType APPLICATION_X_BZIP2 = new MimeType("application/x-bzip2");
    public static final MimeType APPLICATION_X_CDF = new MimeType("application/x-cdf");
    public static final MimeType APPLICATION_X_CDLINK = new MimeType("application/x-cdlink");
    public static final MimeType APPLICATION_X_CHAT = new MimeType("application/x-chat");
    public static final MimeType APPLICATION_X_CMU_RASTER = new MimeType("application/x-cmu-raster");
    public static final MimeType APPLICATION_X_COCOA = new MimeType("application/x-cocoa");
    public static final MimeType APPLICATION_X_COMPACTPRO = new MimeType("application/x-compactpro");
    public static final MimeType APPLICATION_X_COMPRESS = new MimeType("application/x-compress");
    public static final MimeType APPLICATION_X_COMPRESSED = new MimeType("application/x-compressed");
    public static final MimeType APPLICATION_X_CONFERENCE = new MimeType("application/x-conference");
    public static final MimeType APPLICATION_X_CPIO = new MimeType("application/x-cpio");
    public static final MimeType APPLICATION_X_CPT = new MimeType("application/x-cpt");
    public static final MimeType APPLICATION_X_CSH = new MimeType("application/x-csh");
    public static final MimeType APPLICATION_X_DEEPV = new MimeType("application/x-deepv");
    public static final MimeType APPLICATION_X_DIRECTOR = new MimeType("application/x-director");
    public static final MimeType APPLICATION_X_DVI = new MimeType("application/x-dvi");
    public static final MimeType APPLICATION_X_ELC = new MimeType("application/x-elc");
    public static final MimeType APPLICATION_X_ENVOY = new MimeType("application/x-envoy");
    public static final MimeType APPLICATION_X_ESREHBER = new MimeType("application/x-esrehber");
    public static final MimeType APPLICATION_X_EXCEL = new MimeType("application/x-excel");
    public static final MimeType APPLICATION_X_FRAME = new MimeType("application/x-frame");
    public static final MimeType APPLICATION_X_FREELANCE = new MimeType("application/x-freelance");
    public static final MimeType APPLICATION_X_GSP = new MimeType("application/x-gsp");
    public static final MimeType APPLICATION_X_GSS = new MimeType("application/x-gss");
    public static final MimeType APPLICATION_X_GTAR = new MimeType("application/x-gtar");
    public static final MimeType APPLICATION_X_GZIP = new MimeType("application/x-gzip");
    public static final MimeType APPLICATION_X_HDF = new MimeType("application/x-hdf");
    public static final MimeType APPLICATION_X_HELPFILE = new MimeType("application/x-helpfile");
    public static final MimeType APPLICATION_X_HTTPD_IMAP = new MimeType("application/x-httpd-imap");
    public static final MimeType APPLICATION_X_IMA = new MimeType("application/x-ima");
    public static final MimeType APPLICATION_X_INTERNETT_SIGNUP = new MimeType("application/x-internett-signup");
    public static final MimeType APPLICATION_X_INVENTOR = new MimeType("application/x-inventor");
    public static final MimeType APPLICATION_X_IP2 = new MimeType("application/x-ip2");
    public static final MimeType APPLICATION_X_JAVASCRIPT = new MimeType("application/x-javascript");
    public static final MimeType APPLICATION_X_JAVA_CLASS = new MimeType("application/x-java-class");
    public static final MimeType APPLICATION_X_JAVA_COMMERCE = new MimeType("application/x-java-commerce");
    public static final MimeType APPLICATION_X_KOAN = new MimeType("application/x-koan");
    public static final MimeType APPLICATION_X_KSH = new MimeType("application/x-ksh");
    public static final MimeType APPLICATION_X_LATEX = new MimeType("application/x-latex");
    public static final MimeType APPLICATION_X_LHA = new MimeType("application/x-lha");
    public static final MimeType APPLICATION_X_LISP = new MimeType("application/x-lisp");
    public static final MimeType APPLICATION_X_LIVESCREEN = new MimeType("application/x-livescreen");
    public static final MimeType APPLICATION_X_LOTUS = new MimeType("application/x-lotus");
    public static final MimeType APPLICATION_X_LOTUSSCREENCAM = new MimeType("application/x-lotusscreencam");
    public static final MimeType APPLICATION_X_LZH = new MimeType("application/x-lzh");
    public static final MimeType APPLICATION_X_LZX = new MimeType("application/x-lzx");
    public static final MimeType APPLICATION_X_MACBINARY = new MimeType("application/x-macbinary");
    public static final MimeType APPLICATION_X_MAC_BINHEX40 = new MimeType("application/x-mac-binhex40");
    public static final MimeType APPLICATION_X_MAGIC_CAP_PACKAGE_1_0 = new MimeType("application/x-magic-cap-package-1.0");
    public static final MimeType APPLICATION_X_MATHCAD = new MimeType("application/x-mathcad");
    public static final MimeType APPLICATION_X_MEME = new MimeType("application/x-meme");
    public static final MimeType APPLICATION_X_MIDI = new MimeType("application/x-midi");
    public static final MimeType APPLICATION_X_MIF = new MimeType("application/x-mif");
    public static final MimeType APPLICATION_X_MIX_TRANSFER = new MimeType("application/x-mix-transfer");
    public static final MimeType APPLICATION_X_MPLAYER2 = new MimeType("application/x-mplayer2");
    public static final MimeType APPLICATION_X_MSEXCEL = new MimeType("application/x-msexcel");
    public static final MimeType APPLICATION_X_MSPOWERPOINT = new MimeType("application/x-mspowerpoint");
    public static final MimeType APPLICATION_X_NAVIDOC = new MimeType("application/x-navidoc");
    public static final MimeType APPLICATION_X_NAVIMAP = new MimeType("application/x-navimap");
    public static final MimeType APPLICATION_X_NAVISTYLE = new MimeType("application/x-navistyle");
    public static final MimeType APPLICATION_X_NAVI_ANIMATION = new MimeType("application/x-navi-animation");
    public static final MimeType APPLICATION_X_NETCDF = new MimeType("application/x-netcdf");
    public static final MimeType APPLICATION_X_NEWTON_COMPATIBLE_PKG = new MimeType("application/x-newton-compatible-pkg");
    public static final MimeType APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE = new MimeType("application/x-nokia-9000-communicator-add-on-software");
    public static final MimeType APPLICATION_X_OMC = new MimeType("application/x-omc");
    public static final MimeType APPLICATION_X_OMCDATAMAKER = new MimeType("application/x-omcdatamaker");
    public static final MimeType APPLICATION_X_OMCREGERATOR = new MimeType("application/x-omcregerator");
    public static final MimeType APPLICATION_X_PAGEMAKER = new MimeType("application/x-pagemaker");
    public static final MimeType APPLICATION_X_PCL = new MimeType("application/x-pcl");
    public static final MimeType APPLICATION_X_PIXCLSCRIPT = new MimeType("application/x-pixclscript");
    public static final MimeType APPLICATION_X_PKCS10 = new MimeType("application/x-pkcs10");
    public static final MimeType APPLICATION_X_PKCS12 = new MimeType("application/x-pkcs12");
    public static final MimeType APPLICATION_X_PKCS7_CERTIFICATES = new MimeType("application/x-pkcs7-certificates");
    public static final MimeType APPLICATION_X_PKCS7_CERTREQRESP = new MimeType("application/x-pkcs7-certreqresp");
    public static final MimeType APPLICATION_X_PKCS7_MIME = new MimeType("application/x-pkcs7-mime");
    public static final MimeType APPLICATION_X_PKCS7_SIGNATURE = new MimeType("application/x-pkcs7-signature");
    public static final MimeType APPLICATION_X_POINTPLUS = new MimeType("application/x-pointplus");
    public static final MimeType APPLICATION_X_PORTABLE_ANYMAP = new MimeType("application/x-portable-anymap");
    public static final MimeType APPLICATION_X_PROJECT = new MimeType("application/x-project");
    public static final MimeType APPLICATION_X_QPRO = new MimeType("application/x-qpro");
    public static final MimeType APPLICATION_X_RTF = new MimeType("application/x-rtf");
    public static final MimeType APPLICATION_X_SDP = new MimeType("application/x-sdp");
    public static final MimeType APPLICATION_X_SEA = new MimeType("application/x-sea");
    public static final MimeType APPLICATION_X_SEELOGO = new MimeType("application/x-seelogo");
    public static final MimeType APPLICATION_X_SH = new MimeType("application/x-sh");
    public static final MimeType APPLICATION_X_SHAR = new MimeType("application/x-shar");
    public static final MimeType APPLICATION_X_SHOCKWAVE_FLASH = new MimeType("application/x-shockwave-flash");
    public static final MimeType APPLICATION_X_SIT = new MimeType("application/x-sit");
    public static final MimeType APPLICATION_X_SPRITE = new MimeType("application/x-sprite");
    public static final MimeType APPLICATION_X_STUFFIT = new MimeType("application/x-stuffit");
    public static final MimeType APPLICATION_X_SV4CPIO = new MimeType("application/x-sv4cpio");
    public static final MimeType APPLICATION_X_SV4CRC = new MimeType("application/x-sv4crc");
    public static final MimeType APPLICATION_X_TAR = new MimeType("application/x-tar");
    public static final MimeType APPLICATION_X_TBOOK = new MimeType("application/x-tbook");
    public static final MimeType APPLICATION_X_TCL = new MimeType("application/x-tcl");
    public static final MimeType APPLICATION_X_TEX = new MimeType("application/x-tex");
    public static final MimeType APPLICATION_X_TEXINFO = new MimeType("application/x-texinfo");
    public static final MimeType APPLICATION_X_TROFF = new MimeType("application/x-troff");
    public static final MimeType APPLICATION_X_TROFF_MAN = new MimeType("application/x-troff-man");
    public static final MimeType APPLICATION_X_TROFF_ME = new MimeType("application/x-troff-me");
    public static final MimeType APPLICATION_X_TROFF_MS = new MimeType("application/x-troff-ms");
    public static final MimeType APPLICATION_X_TROFF_MSVIDEO = new MimeType("application/x-troff-msvideo");
    public static final MimeType APPLICATION_X_USTAR = new MimeType("application/x-ustar");
    public static final MimeType APPLICATION_X_VISIO = new MimeType("application/x-visio");
    public static final MimeType APPLICATION_X_VND_AUDIOEXPLOSION_MZZ = new MimeType("application/x-vnd.audioexplosion.mzz");
    public static final MimeType APPLICATION_X_VND_LS_XPIX = new MimeType("application/x-vnd.ls-xpix");
    public static final MimeType APPLICATION_X_VRML = new MimeType("application/x-vrml");
    public static final MimeType APPLICATION_X_WAIS_SOURCE = new MimeType("application/x-wais-source");
    public static final MimeType APPLICATION_X_WINHELP = new MimeType("application/x-winhelp");
    public static final MimeType APPLICATION_X_WINTALK = new MimeType("application/x-wintalk");
    public static final MimeType APPLICATION_X_WORLD = new MimeType("application/x-world");
    public static final MimeType APPLICATION_X_WPWIN = new MimeType("application/x-wpwin");
    public static final MimeType APPLICATION_X_WRI = new MimeType("application/x-wri");
    public static final MimeType APPLICATION_X_X509_CA_CERT = new MimeType("application/x-x509-ca-cert");
    public static final MimeType APPLICATION_X_X509_USER_CERT = new MimeType("application/x-x509-user-cert");
    public static final MimeType APPLICATION_X_ZIP_COMPRESSED = new MimeType("application/x-zip-compressed");
    public static final MimeType APPLICATION_YAML = new MimeType("application/yaml");
    public static final MimeType APPLICATION_ZIP = new MimeType("application/zip");
    public static final MimeType AUDIO_AIFF = new MimeType("audio/aiff");
    public static final MimeType AUDIO_BASIC = new MimeType("audio/basic");
    public static final MimeType AUDIO_IT = new MimeType("audio/it");
    public static final MimeType AUDIO_MAKE = new MimeType("audio/make");
MimeType("audio/make"); public static final MimeType AUDIO_MAKE_MY_FUNK = new MimeType("audio/make.my.funk"); public static final MimeType AUDIO_MID = new MimeType("audio/mid"); public static final MimeType AUDIO_MIDI = new MimeType("audio/midi"); public static final MimeType AUDIO_MOD = new MimeType("audio/mod"); public static final MimeType AUDIO_MPEG = new MimeType("audio/mpeg"); public static final MimeType AUDIO_MPEG3 = new MimeType("audio/mpeg3"); public static final MimeType AUDIO_NSPAUDIO = new MimeType("audio/nspaudio"); public static final MimeType AUDIO_S3M = new MimeType("audio/s3m"); public static final MimeType AUDIO_TSPLAYER = new MimeType("audio/tsplayer"); public static final MimeType AUDIO_TSP_AUDIO = new MimeType("audio/tsp-audio"); public static final MimeType AUDIO_VND_QCELP = new MimeType("audio/vnd.qcelp"); public static final MimeType AUDIO_VOC = new MimeType("audio/voc"); public static final MimeType AUDIO_VOXWARE = new MimeType("audio/voxware"); public static final MimeType AUDIO_WAV = new MimeType("audio/wav"); public static final MimeType AUDIO_XM = new MimeType("audio/xm"); public static final MimeType AUDIO_X_ADPCM = new MimeType("audio/x-adpcm"); public static final MimeType AUDIO_X_AIFF = new MimeType("audio/x-aiff"); public static final MimeType AUDIO_X_AU = new MimeType("audio/x-au"); public static final MimeType AUDIO_X_GSM = new MimeType("audio/x-gsm"); public static final MimeType AUDIO_X_JAM = new MimeType("audio/x-jam"); public static final MimeType AUDIO_X_LIVEAUDIO = new MimeType("audio/x-liveaudio"); public static final MimeType AUDIO_X_MID = new MimeType("audio/x-mid"); public static final MimeType AUDIO_X_MIDI = new MimeType("audio/x-midi"); public static final MimeType AUDIO_X_MOD = new MimeType("audio/x-mod"); public static final MimeType AUDIO_X_MPEG = new MimeType("audio/x-mpeg"); public static final MimeType AUDIO_X_MPEG_3 = new MimeType("audio/x-mpeg-3"); public static final MimeType AUDIO_X_MPEQURL = new MimeType("audio/x-mpequrl"); public static final MimeType AUDIO_X_NSPAUDIO = new MimeType("audio/x-nspaudio"); public static final MimeType AUDIO_X_PN_REALAUDIO = new MimeType("audio/x-pn-realaudio"); public static final MimeType AUDIO_X_PN_REALAUDIO_PLUGIN = new MimeType("audio/x-pn-realaudio-plugin"); public static final MimeType AUDIO_X_PSID = new MimeType("audio/x-psid"); public static final MimeType AUDIO_X_REALAUDIO = new MimeType("audio/x-realaudio"); public static final MimeType AUDIO_X_TWINVQ = new MimeType("audio/x-twinvq"); public static final MimeType AUDIO_X_TWINVQ_PLUGIN = new MimeType("audio/x-twinvq-plugin"); public static final MimeType AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE = new MimeType("audio/x-vnd.audioexplosion.mjuicemediafile"); public static final MimeType AUDIO_X_VOC = new MimeType("audio/x-voc"); public static final MimeType AUDIO_X_WAV = new MimeType("audio/x-wav"); public static final MimeType CHEMICAL_X_PDB = new MimeType("chemical/x-pdb"); public static final MimeType DRAWING_X_DWF = new MimeType("drawing/x-dwf"); public static final MimeType IMAGE_BMP = new MimeType("image/bmp"); public static final MimeType IMAGE_CMU_RASTER = new MimeType("image/cmu-raster"); public static final MimeType IMAGE_FIF = new MimeType("image/fif"); public static final MimeType IMAGE_FLORIAN = new MimeType("image/florian"); public static final MimeType IMAGE_G3FAX = new MimeType("image/g3fax"); public static final MimeType IMAGE_GIF = new MimeType("image/gif"); public static final MimeType IMAGE_IEF = new 
MimeType("image/ief"); public static final MimeType IMAGE_JPEG = new MimeType("image/jpeg"); public static final MimeType IMAGE_JUTVISION = new MimeType("image/jutvision"); public static final MimeType IMAGE_NAPLPS = new MimeType("image/naplps"); public static final MimeType IMAGE_PICT = new MimeType("image/pict"); public static final MimeType IMAGE_PJPEG = new MimeType("image/pjpeg"); public static final MimeType IMAGE_PNG = new MimeType("image/png"); public static final MimeType IMAGE_TIFF = new MimeType("image/tiff"); public static final MimeType IMAGE_VASA = new MimeType("image/vasa"); public static final MimeType IMAGE_VND_DWG = new MimeType("image/vnd.dwg"); public static final MimeType IMAGE_VND_FPX = new MimeType("image/vnd.fpx"); public static final MimeType IMAGE_VND_NET_FPX = new MimeType("image/vnd.net-fpx"); public static final MimeType IMAGE_VND_RN_REALFLASH = new MimeType("image/vnd.rn-realflash"); public static final MimeType IMAGE_VND_RN_REALPIX = new MimeType("image/vnd.rn-realpix"); public static final MimeType IMAGE_VND_WAP_WBMP = new MimeType("image/vnd.wap.wbmp"); public static final MimeType IMAGE_VND_XIFF = new MimeType("image/vnd.xiff"); public static final MimeType IMAGE_XBM = new MimeType("image/xbm"); public static final MimeType IMAGE_XPM = new MimeType("image/xpm"); public static final MimeType IMAGE_X_CMU_RASTER = new MimeType("image/x-cmu-raster"); public static final MimeType IMAGE_X_DWG = new MimeType("image/x-dwg"); public static final MimeType IMAGE_X_ICON = new MimeType("image/x-icon"); public static final MimeType IMAGE_X_JG = new MimeType("image/x-jg"); public static final MimeType IMAGE_X_JPS = new MimeType("image/x-jps"); public static final MimeType IMAGE_X_NIFF = new MimeType("image/x-niff"); public static final MimeType IMAGE_X_PCX = new MimeType("image/x-pcx"); public static final MimeType IMAGE_X_PICT = new MimeType("image/x-pict"); public static final MimeType IMAGE_X_PORTABLE_ANYMAP = new MimeType("image/x-portable-anymap"); public static final MimeType IMAGE_X_PORTABLE_BITMAP = new MimeType("image/x-portable-bitmap"); public static final MimeType IMAGE_X_PORTABLE_GRAYMAP = new MimeType("image/x-portable-graymap"); public static final MimeType IMAGE_X_PORTABLE_GREYMAP = new MimeType("image/x-portable-greymap"); public static final MimeType IMAGE_X_PORTABLE_PIXMAP = new MimeType("image/x-portable-pixmap"); public static final MimeType IMAGE_X_QUICKTIME = new MimeType("image/x-quicktime"); public static final MimeType IMAGE_X_RGB = new MimeType("image/x-rgb"); public static final MimeType IMAGE_X_TIFF = new MimeType("image/x-tiff"); public static final MimeType IMAGE_X_WINDOWS_BMP = new MimeType("image/x-windows-bmp"); public static final MimeType IMAGE_X_XBITMAP = new MimeType("image/x-xbitmap"); public static final MimeType IMAGE_X_XBM = new MimeType("image/x-xbm"); public static final MimeType IMAGE_X_XPIXMAP = new MimeType("image/x-xpixmap"); public static final MimeType IMAGE_X_XWD = new MimeType("image/x-xwd"); public static final MimeType IMAGE_X_XWINDOWDUMP = new MimeType("image/x-xwindowdump"); public static final MimeType I_WORLD_I_VRML = new MimeType("i-world/i-vrml"); public static final MimeType MESSAGE_RFC822 = new MimeType("message/rfc822"); public static final MimeType MODEL_IGES = new MimeType("model/iges"); public static final MimeType MODEL_VND_DWF = new MimeType("model/vnd.dwf"); public static final MimeType MODEL_VRML = new MimeType("model/vrml"); public static final MimeType MODEL_X_POV = new MimeType("model/x-pov"); 
    public static final MimeType MULTIPART_X_GZIP = new MimeType("multipart/x-gzip");
    public static final MimeType MULTIPART_X_USTAR = new MimeType("multipart/x-ustar");
    public static final MimeType MULTIPART_X_ZIP = new MimeType("multipart/x-zip");
    public static final MimeType MUSIC_CRESCENDO = new MimeType("music/crescendo");
    public static final MimeType MUSIC_X_KARAOKE = new MimeType("music/x-karaoke");
    public static final MimeType PALEOVU_X_PV = new MimeType("paleovu/x-pv");
    public static final MimeType TEXT_ASP = new MimeType("text/asp");
    public static final MimeType TEXT_CSS = new MimeType("text/css");
    public static final MimeType TEXT_ECMASCRIPT = new MimeType("text/ecmascript");
    public static final MimeType TEXT_HTML = new MimeType("text/html");
    public static final MimeType TEXT_JAVASCRIPT = new MimeType("text/javascript");
    public static final MimeType TEXT_MCF = new MimeType("text/mcf");
    public static final MimeType TEXT_PASCAL = new MimeType("text/pascal");
    public static final MimeType TEXT_PLAIN = new MimeType("text/plain");
    public static final MimeType TEXT_RICHTEXT = new MimeType("text/richtext");
    public static final MimeType TEXT_SCRIPLET = new MimeType("text/scriplet");
    public static final MimeType TEXT_SGML = new MimeType("text/sgml");
    public static final MimeType TEXT_TAB_SEPARATED_VALUES = new MimeType("text/tab-separated-values");
    public static final MimeType TEXT_URI_LIST = new MimeType("text/uri-list");
    public static final MimeType TEXT_VND_ABC = new MimeType("text/vnd.abc");
    public static final MimeType TEXT_VND_FMI_FLEXSTOR = new MimeType("text/vnd.fmi.flexstor");
    public static final MimeType TEXT_VND_RN_REALTEXT = new MimeType("text/vnd.rn-realtext");
    public static final MimeType TEXT_VND_WAP_WML = new MimeType("text/vnd.wap.wml");
    public static final MimeType TEXT_VND_WAP_WMLSCRIPT = new MimeType("text/vnd.wap.wmlscript");
    public static final MimeType TEXT_WEBVIEWHTML = new MimeType("text/webviewhtml");
    public static final MimeType TEXT_XML = new MimeType("text/xml");
    public static final MimeType TEXT_X_ASM = new MimeType("text/x-asm");
    public static final MimeType TEXT_X_AUDIOSOFT_INTRA = new MimeType("text/x-audiosoft-intra");
    public static final MimeType TEXT_X_C = new MimeType("text/x-c");
    public static final MimeType TEXT_X_COMPONENT = new MimeType("text/x-component");
    public static final MimeType TEXT_X_FORTRAN = new MimeType("text/x-fortran");
    public static final MimeType TEXT_X_H = new MimeType("text/x-h");
    public static final MimeType TEXT_X_JAVA_SOURCE = new MimeType("text/x-java-source");
    public static final MimeType TEXT_X_LA_ASF = new MimeType("text/x-la-asf");
    public static final MimeType TEXT_X_M = new MimeType("text/x-m");
    public static final MimeType TEXT_X_PASCAL = new MimeType("text/x-pascal");
    public static final MimeType TEXT_X_SCRIPT = new MimeType("text/x-script");
    public static final MimeType TEXT_X_SCRIPT_CSH = new MimeType("text/x-script.csh");
    public static final MimeType TEXT_X_SCRIPT_ELISP = new MimeType("text/x-script.elisp");
    public static final MimeType TEXT_X_SCRIPT_GUILE = new MimeType("text/x-script.guile");
    public static final MimeType TEXT_X_SCRIPT_KSH = new MimeType("text/x-script.ksh");
    public static final MimeType TEXT_X_SCRIPT_LISP = new MimeType("text/x-script.lisp");
    public static final MimeType TEXT_X_SCRIPT_PERL = new MimeType("text/x-script.perl");
    public static final MimeType TEXT_X_SCRIPT_PERL_MODULE = new MimeType("text/x-script.perl-module");
    public static final MimeType TEXT_X_SCRIPT_PHYTON = new MimeType("text/x-script.phyton");
MimeType("text/x-script.phyton"); public static final MimeType TEXT_X_SCRIPT_REXX = new MimeType("text/x-script.rexx"); public static final MimeType TEXT_X_SCRIPT_SCHEME = new MimeType("text/x-script.scheme"); public static final MimeType TEXT_X_SCRIPT_SH = new MimeType("text/x-script.sh"); public static final MimeType TEXT_X_SCRIPT_TCL = new MimeType("text/x-script.tcl"); public static final MimeType TEXT_X_SCRIPT_TCSH = new MimeType("text/x-script.tcsh"); public static final MimeType TEXT_X_SCRIPT_ZSH = new MimeType("text/x-script.zsh"); public static final MimeType TEXT_X_SERVER_PARSED_HTML = new MimeType("text/x-server-parsed-html"); public static final MimeType TEXT_X_SETEXT = new MimeType("text/x-setext"); public static final MimeType TEXT_X_SGML = new MimeType("text/x-sgml"); public static final MimeType TEXT_X_SPEECH = new MimeType("text/x-speech"); public static final MimeType TEXT_X_UIL = new MimeType("text/x-uil"); public static final MimeType TEXT_X_UUENCODE = new MimeType("text/x-uuencode"); public static final MimeType TEXT_X_VCALENDAR = new MimeType("text/x-vcalendar"); public static final MimeType VIDEO_ANIMAFLEX = new MimeType("video/animaflex"); public static final MimeType VIDEO_AVI = new MimeType("video/avi"); public static final MimeType VIDEO_AVS_VIDEO = new MimeType("video/avs-video"); public static final MimeType VIDEO_DL = new MimeType("video/dl"); public static final MimeType VIDEO_FLI = new MimeType("video/fli"); public static final MimeType VIDEO_GL = new MimeType("video/gl"); public static final MimeType VIDEO_MPEG = new MimeType("video/mpeg"); public static final MimeType VIDEO_MSVIDEO = new MimeType("video/msvideo"); public static final MimeType VIDEO_QUICKTIME = new MimeType("video/quicktime"); public static final MimeType VIDEO_VDO = new MimeType("video/vdo"); public static final MimeType VIDEO_VIVO = new MimeType("video/vivo"); public static final MimeType VIDEO_VND_RN_REALVIDEO = new MimeType("video/vnd.rn-realvideo"); public static final MimeType VIDEO_VND_VIVO = new MimeType("video/vnd.vivo"); public static final MimeType VIDEO_VOSAIC = new MimeType("video/vosaic"); public static final MimeType VIDEO_X_AMT_DEMORUN = new MimeType("video/x-amt-demorun"); public static final MimeType VIDEO_X_AMT_SHOWRUN = new MimeType("video/x-amt-showrun"); public static final MimeType VIDEO_X_ATOMIC3D_FEATURE = new MimeType("video/x-atomic3d-feature"); public static final MimeType VIDEO_X_DL = new MimeType("video/x-dl"); public static final MimeType VIDEO_X_DV = new MimeType("video/x-dv"); public static final MimeType VIDEO_X_FLI = new MimeType("video/x-fli"); public static final MimeType VIDEO_X_GL = new MimeType("video/x-gl"); public static final MimeType VIDEO_X_ISVIDEO = new MimeType("video/x-isvideo"); public static final MimeType VIDEO_X_MOTION_JPEG = new MimeType("video/x-motion-jpeg"); public static final MimeType VIDEO_X_MPEG = new MimeType("video/x-mpeg"); public static final MimeType VIDEO_X_MPEQ2A = new MimeType("video/x-mpeq2a"); public static final MimeType VIDEO_X_MSVIDEO = new MimeType("video/x-msvideo"); public static final MimeType VIDEO_X_MS_ASF = new MimeType("video/x-ms-asf"); public static final MimeType VIDEO_X_MS_ASF_PLUGIN = new MimeType("video/x-ms-asf-plugin"); public static final MimeType VIDEO_X_QTC = new MimeType("video/x-qtc"); public static final MimeType VIDEO_X_SCM = new MimeType("video/x-scm"); public static final MimeType VIDEO_X_SGI_MOVIE = new MimeType("video/x-sgi-movie"); public static final MimeType WINDOWS_METAFILE = new 
MimeType("windows/metafile"); public static final MimeType WWW_MIME = new MimeType("www/mime"); public static final MimeType XGL_DRAWING = new MimeType("xgl/drawing"); public static final MimeType XGL_MOVIE = new MimeType("xgl/movie"); public static final MimeType X_CONFERENCE_X_COOLTALK = new MimeType("x-conference/x-cooltalk"); public static final MimeType X_MUSIC_X_MIDI = new MimeType("x-music/x-midi"); public static final MimeType X_WORLD_X_3DMF = new MimeType("x-world/x-3dmf"); public static final MimeType X_WORLD_X_SVR = new MimeType("x-world/x-svr"); public static final MimeType X_WORLD_X_VRML = new MimeType("x-world/x-vrml"); public static final MimeType X_WORLD_X_VRT = new MimeType("x-world/x-vrt"); public static final String _3DM = "3dm"; public static final String _3DMF = "3dmf"; public static final String A = "a"; public static final String AAB = "aab"; public static final String AAM = "aam"; public static final String AAS = "aas"; public static final String ABC = "abc"; public static final String ACGI = "acgi"; public static final String AFL = "afl"; public static final String AI = "ai"; public static final String AIF = "aif"; public static final String AIFC = "aifc"; public static final String AIFF = "aiff"; public static final String AIM = "aim"; public static final String AIP = "aip"; public static final String ANI = "ani"; public static final String AOS = "aos"; public static final String APS = "aps"; public static final String ARC = "arc"; public static final String ARJ = "arj"; public static final String ART = "art"; public static final String ASF = "asf"; public static final String ASM = "asm"; public static final String ASP = "asp"; public static final String ASX = "asx"; public static final String AU = "au"; public static final String AVI = "avi"; public static final String AVS = "avs"; public static final String BCPIO = "bcpio"; public static final String BIN = "bin"; public static final String BM = "bm"; public static final String BMP = "bmp"; public static final String BOO = "boo"; public static final String BOOK = "book"; public static final String BOZ = "boz"; public static final String BSH = "bsh"; public static final String BZ = "bz"; public static final String BZ2 = "bz2"; public static final String C = "c"; public static final String C_PLUS_PLUS = "c++"; public static final String CAT = "cat"; public static final String CC = "cc"; public static final String CCAD = "ccad"; public static final String CCO = "cco"; public static final String CDF = "cdf"; public static final String CER = "cer"; public static final String CHA = "cha"; public static final String CHAT = "chat"; public static final String CLASS = "class"; public static final String COM = "com"; public static final String CONF = "conf"; public static final String CPIO = "cpio"; public static final String CPP = "cpp"; public static final String CPT = "cpt"; public static final String CRL = "crl"; public static final String CRT = "crt"; public static final String CSH = "csh"; public static final String CSS = "css"; public static final String CXX = "cxx"; public static final String DCR = "dcr"; public static final String DEEPV = "deepv"; public static final String DEF = "def"; public static final String DER = "der"; public static final String DIF = "dif"; public static final String DIR = "dir"; public static final String DL = "dl"; public static final String DOC = "doc"; public static final String DOT = "dot"; public static final String DP = "dp"; public static final String DRW = "drw"; public static 
    public static final String DUMP = "dump";
    public static final String DV = "dv";
    public static final String DVI = "dvi";
    public static final String DWF = "dwf";
    public static final String DWG = "dwg";
    public static final String DXF = "dxf";
    public static final String DXR = "dxr";
    public static final String EL = "el";
    public static final String ELC = "elc";
    public static final String ENV = "env";
    public static final String EPS = "eps";
    public static final String ES = "es";
    public static final String ETX = "etx";
    public static final String EVY = "evy";
    public static final String EXE = "exe";
    public static final String F = "f";
    public static final String F77 = "f77";
    public static final String F90 = "f90";
    public static final String FDF = "fdf";
    public static final String FIF = "fif";
    public static final String FLI = "fli";
    public static final String FLO = "flo";
    public static final String FLX = "flx";
    public static final String FMF = "fmf";
    public static final String FOR = "for";
    public static final String FPX = "fpx";
    public static final String FRL = "frl";
    public static final String FUNK = "funk";
    public static final String G = "g";
    public static final String G3 = "g3";
    public static final String GIF = "gif";
    public static final String GL = "gl";
    public static final String GSD = "gsd";
    public static final String GSM = "gsm";
    public static final String GSP = "gsp";
    public static final String GSS = "gss";
    public static final String GTAR = "gtar";
    public static final String GZ = "gz";
    public static final String GZIP = "gzip";
    public static final String H = "h";
    public static final String HDF = "hdf";
    public static final String HELP = "help";
    public static final String HGL = "hgl";
    public static final String HH = "hh";
    public static final String HLB = "hlb";
    public static final String HLP = "hlp";
    public static final String HPG = "hpg";
    public static final String HPGL = "hpgl";
    public static final String HQX = "hqx";
    public static final String HTA = "hta";
    public static final String HTC = "htc";
    public static final String HTM = "htm";
    public static final String HTML = "html";
    public static final String HTMLS = "htmls";
    public static final String HTT = "htt";
    public static final String HTX = "htx";
    public static final String ICE = "ice";
    public static final String ICO = "ico";
    public static final String IDC = "idc";
    public static final String IEF = "ief";
    public static final String IEFS = "iefs";
    public static final String IGES = "iges";
    public static final String IGS = "igs";
    public static final String IMA = "ima";
    public static final String IMAP = "imap";
    public static final String INF = "inf";
    public static final String INS = "ins";
    public static final String IP = "ip";
    public static final String ISU = "isu";
    public static final String IT = "it";
    public static final String IV = "iv";
    public static final String IVR = "ivr";
    public static final String IVY = "ivy";
    public static final String JAM = "jam";
    public static final String JAV = "jav";
    public static final String JAVA = "java";
    public static final String JCM = "jcm";
    public static final String JFIF = "jfif";
    public static final String JFIF_TBNL = "jfif-tbnl";
    public static final String JPE = "jpe";
    public static final String JPEG = "jpeg";
    public static final String JPG = "jpg";
    public static final String JPS = "jps";
    public static final String JS = "js";
    public static final String JSON = "json";
    public static final String JUT = "jut";
    public static final String KAR = "kar";
    public static final String KSH = "ksh";
    public static final String LA = "la";
    public static final String LAM = "lam";
    public static final String LATEX = "latex";
    public static final String LHA = "lha";
    public static final String LHX = "lhx";
    public static final String LIST = "list";
    public static final String LMA = "lma";
    public static final String LOG = "log";
    public static final String LSP = "lsp";
    public static final String LST = "lst";
    public static final String LSX = "lsx";
    public static final String LTX = "ltx";
    public static final String LZH = "lzh";
    public static final String LZX = "lzx";
    public static final String M = "m";
    public static final String M1V = "m1v";
    public static final String M2A = "m2a";
    public static final String M2V = "m2v";
    public static final String M3U = "m3u";
    public static final String MAN = "man";
    public static final String MAP = "map";
    public static final String MAR = "mar";
    public static final String MBD = "mbd";
    public static final String MC$ = "mc$";
    public static final String MCD = "mcd";
    public static final String MCF = "mcf";
    public static final String MCP = "mcp";
    public static final String ME = "me";
    public static final String MHT = "mht";
    public static final String MHTML = "mhtml";
    public static final String MID = "mid";
    public static final String MIDI = "midi";
    public static final String MIF = "mif";
    public static final String MIME = "mime";
    public static final String MJF = "mjf";
    public static final String MJPG = "mjpg";
    public static final String MM = "mm";
    public static final String MME = "mme";
    public static final String MOD = "mod";
    public static final String MOOV = "moov";
    public static final String MOV = "mov";
    public static final String MOVIE = "movie";
    public static final String MP2 = "mp2";
    public static final String MP3 = "mp3";
    public static final String MPA = "mpa";
    public static final String MPC = "mpc";
    public static final String MPE = "mpe";
    public static final String MPEG = "mpeg";
    public static final String MPG = "mpg";
    public static final String MPGA = "mpga";
    public static final String MPP = "mpp";
    public static final String MPT = "mpt";
    public static final String MPV = "mpv";
    public static final String MPX = "mpx";
    public static final String MRC = "mrc";
    public static final String MS = "ms";
    public static final String MV = "mv";
    public static final String MY = "my";
    public static final String MZZ = "mzz";
    public static final String NAP = "nap";
    public static final String NAPLPS = "naplps";
    public static final String NC = "nc";
    public static final String NCM = "ncm";
    public static final String NIF = "nif";
    public static final String NIFF = "niff";
    public static final String NIX = "nix";
    public static final String NSC = "nsc";
    public static final String NVD = "nvd";
    public static final String O = "o";
    public static final String ODA = "oda";
    public static final String OMC = "omc";
    public static final String OMCD = "omcd";
    public static final String OMCR = "omcr";
    public static final String P = "p";
    public static final String P10 = "p10";
    public static final String P12 = "p12";
    public static final String P7A = "p7a";
    public static final String P7C = "p7c";
    public static final String P7M = "p7m";
    public static final String P7R = "p7r";
    public static final String P7S = "p7s";
    public static final String PART = "part";
    public static final String PAS = "pas";
    public static final String PBM = "pbm";
    public static final String PCL = "pcl";
    public static final String PCT = "pct";
    public static final String PCX = "pcx";
    public static final String PDB = "pdb";
    public static final String PDF = "pdf";
    public static final String PFUNK = "pfunk";
    public static final String PGM = "pgm";
    public static final String PIC = "pic";
    public static final String PICT = "pict";
    public static final String PKG = "pkg";
    public static final String PKO = "pko";
    public static final String PL = "pl";
    public static final String PLX = "plx";
    public static final String PM = "pm";
    public static final String PM4 = "pm4";
    public static final String PM5 = "pm5";
    public static final String PNG = "png";
    public static final String PNM = "pnm";
    public static final String POT = "pot";
    public static final String POV = "pov";
    public static final String PPA = "ppa";
    public static final String PPM = "ppm";
    public static final String PPS = "pps";
    public static final String PPT = "ppt";
    public static final String PPZ = "ppz";
    public static final String PRE = "pre";
    public static final String PRT = "prt";
    public static final String PS = "ps";
    public static final String PSD = "psd";
    public static final String PVU = "pvu";
    public static final String PWZ = "pwz";
    public static final String PY = "py";
    public static final String PYC = "pyc";
    public static final String QCP = "qcp";
    public static final String QD3 = "qd3";
    public static final String QD3D = "qd3d";
    public static final String QIF = "qif";
    public static final String QT = "qt";
    public static final String QTC = "qtc";
    public static final String QTI = "qti";
    public static final String QTIF = "qtif";
    public static final String RA = "ra";
    public static final String RAM = "ram";
    public static final String RAS = "ras";
    public static final String RAST = "rast";
    public static final String REXX = "rexx";
    public static final String RF = "rf";
    public static final String RGB = "rgb";
    public static final String RM = "rm";
    public static final String RMI = "rmi";
    public static final String RMM = "rmm";
    public static final String RMP = "rmp";
    public static final String RNG = "rng";
    public static final String RNX = "rnx";
    public static final String ROFF = "roff";
    public static final String RP = "rp";
    public static final String RPM = "rpm";
    public static final String RT = "rt";
    public static final String RTF = "rtf";
    public static final String RTX = "rtx";
    public static final String RV = "rv";
    public static final String S = "s";
    public static final String S3M = "s3m";
    public static final String SAVEME = "saveme";
    public static final String SBK = "sbk";
    public static final String SCM = "scm";
    public static final String SDML = "sdml";
    public static final String SDP = "sdp";
    public static final String SDR = "sdr";
    public static final String SEA = "sea";
    public static final String SET = "set";
    public static final String SGM = "sgm";
    public static final String SGML = "sgml";
    public static final String SH = "sh";
    public static final String SHAR = "shar";
    public static final String SHTML = "shtml";
    public static final String SID = "sid";
    public static final String SIT = "sit";
    public static final String SKD = "skd";
    public static final String SKM = "skm";
    public static final String SKP = "skp";
    public static final String SKT = "skt";
    public static final String SL = "sl";
    public static final String SMI = "smi";
    public static final String SMIL = "smil";
    public static final String SND = "snd";
    public static final String SOL = "sol";
    public static final String SPC = "spc";
    public static final String SPL = "spl";
    public static final String SPR = "spr";
    public static final String SPRITE = "sprite";
    public static final String SRC = "src";
    public static final String SSI = "ssi";
    public static final String SSM = "ssm";
"ssm"; public static final String SST = "sst"; public static final String STEP = "step"; public static final String STL = "stl"; public static final String STP = "stp"; public static final String SV4CPIO = "sv4cpio"; public static final String SV4CRC = "sv4crc"; public static final String SVF = "svf"; public static final String SVR = "svr"; public static final String SWF = "swf"; public static final String T = "t"; public static final String TALK = "talk"; public static final String TAR = "tar"; public static final String TBK = "tbk"; public static final String TCL = "tcl"; public static final String TCSH = "tcsh"; public static final String TEX = "tex"; public static final String TEXI = "texi"; public static final String TEXINFO = "texinfo"; public static final String TEXT = "text"; public static final String TGZ = "tgz"; public static final String TIF = "tif"; public static final String TIFF = "tiff"; public static final String TR = "tr"; public static final String TSI = "tsi"; public static final String TSP = "tsp"; public static final String TSV = "tsv"; public static final String TURBOT = "turbot"; public static final String TXT = "txt"; public static final String UIL = "uil"; public static final String UNI = "uni"; public static final String UNIS = "unis"; public static final String UNV = "unv"; public static final String URI = "uri"; public static final String URIS = "uris"; public static final String USTAR = "ustar"; public static final String UU = "uu"; public static final String UUE = "uue"; public static final String VCD = "vcd"; public static final String VCS = "vcs"; public static final String VDA = "vda"; public static final String VDO = "vdo"; public static final String VEW = "vew"; public static final String VIV = "viv"; public static final String VIVO = "vivo"; public static final String VMD = "vmd"; public static final String VMF = "vmf"; public static final String VOC = "voc"; public static final String VOS = "vos"; public static final String VOX = "vox"; public static final String VQE = "vqe"; public static final String VQF = "vqf"; public static final String VQL = "vql"; public static final String VRML = "vrml"; public static final String VRT = "vrt"; public static final String VSD = "vsd"; public static final String VST = "vst"; public static final String VSW = "vsw"; public static final String W60 = "w60"; public static final String W61 = "w61"; public static final String W6W = "w6w"; public static final String WAV = "wav"; public static final String WB1 = "wb1"; public static final String WBMP = "wbmp"; public static final String WEB = "web"; public static final String WIZ = "wiz"; public static final String WK1 = "wk1"; public static final String WMF = "wmf"; public static final String WML = "wml"; public static final String WMLC = "wmlc"; public static final String WMLS = "wmls"; public static final String WMLSC = "wmlsc"; public static final String WORD = "word"; public static final String WP = "wp"; public static final String WP5 = "wp5"; public static final String WP6 = "wp6"; public static final String WPD = "wpd"; public static final String WQ1 = "wq1"; public static final String WRI = "wri"; public static final String WRL = "wrl"; public static final String WRZ = "wrz"; public static final String WSC = "wsc"; public static final String WSRC = "wsrc"; public static final String WTK = "wtk"; public static final String X_PNG = "x-png"; public static final String XBM = "xbm"; public static final String XDR = "xdr"; public static final String XGZ = "xgz"; public 
    public static final String XIF = "xif";
    public static final String XL = "xl";
    public static final String XLA = "xla";
    public static final String XLB = "xlb";
    public static final String XLC = "xlc";
    public static final String XLD = "xld";
    public static final String XLK = "xlk";
    public static final String XLL = "xll";
    public static final String XLM = "xlm";
    public static final String XLS = "xls";
    public static final String XLT = "xlt";
    public static final String XLV = "xlv";
    public static final String XLW = "xlw";
    public static final String XM = "xm";
    public static final String XML = "xml";
    public static final String XMZ = "xmz";
    public static final String XPIX = "xpix";
    public static final String XPM = "xpm";
    public static final String XSR = "xsr";
    public static final String XWD = "xwd";
    public static final String XYZ = "xyz";
    public static final String Z = "z";
    public static final String ZIP = "zip";
    public static final String ZOO = "zoo";
    public static final String ZSH = "zsh";

    // Lookup tables: MIME types indexed by their type string, and by file suffix.
    private static final Map<String, MimeType> types = new HashMap<>();
    private static final Map<String, MimeType> typesBySuffix = new HashMap<>();

    // Registers every constant under its string form so it can be resolved by name.
    static {
        types.put(APPLICATION_ACAD.toString(), APPLICATION_ACAD);
        types.put(APPLICATION_ARJ.toString(), APPLICATION_ARJ);
        types.put(APPLICATION_BASE64.toString(), APPLICATION_BASE64);
        types.put(APPLICATION_BINHEX.toString(), APPLICATION_BINHEX);
        types.put(APPLICATION_BINHEX4.toString(), APPLICATION_BINHEX4);
        types.put(APPLICATION_BOOK.toString(), APPLICATION_BOOK);
        types.put(APPLICATION_CDF.toString(), APPLICATION_CDF);
        types.put(APPLICATION_CLARISCAD.toString(), APPLICATION_CLARISCAD);
        types.put(APPLICATION_COMMONGROUND.toString(), APPLICATION_COMMONGROUND);
        types.put(APPLICATION_DRAFTING.toString(), APPLICATION_DRAFTING);
        types.put(APPLICATION_DSPTYPE.toString(), APPLICATION_DSPTYPE);
        types.put(APPLICATION_DXF.toString(), APPLICATION_DXF);
        types.put(APPLICATION_ECMASCRIPT.toString(), APPLICATION_ECMASCRIPT);
        types.put(APPLICATION_ENVOY.toString(), APPLICATION_ENVOY);
        types.put(APPLICATION_EXCEL.toString(), APPLICATION_EXCEL);
        types.put(APPLICATION_FRACTALS.toString(), APPLICATION_FRACTALS);
        types.put(APPLICATION_FREELOADER.toString(), APPLICATION_FREELOADER);
        types.put(APPLICATION_FUTURESPLASH.toString(), APPLICATION_FUTURESPLASH);
        types.put(APPLICATION_GNUTAR.toString(), APPLICATION_GNUTAR);
        types.put(APPLICATION_GROUPWISE.toString(), APPLICATION_GROUPWISE);
        types.put(APPLICATION_HLP.toString(), APPLICATION_HLP);
        types.put(APPLICATION_HTA.toString(), APPLICATION_HTA);
        types.put(APPLICATION_IGES.toString(), APPLICATION_IGES);
        types.put(APPLICATION_INF.toString(), APPLICATION_INF);
        types.put(APPLICATION_I_DEAS.toString(), APPLICATION_I_DEAS);
        types.put(APPLICATION_JAVA.toString(), APPLICATION_JAVA);
        types.put(APPLICATION_JSON.toString(), APPLICATION_JSON);
        types.put(APPLICATION_JAVASCRIPT.toString(), APPLICATION_JAVASCRIPT);
        types.put(APPLICATION_JAVA_BYTE_CODE.toString(), APPLICATION_JAVA_BYTE_CODE);
        types.put(APPLICATION_LHA.toString(), APPLICATION_LHA);
        types.put(APPLICATION_LZX.toString(), APPLICATION_LZX);
        types.put(APPLICATION_MACBINARY.toString(), APPLICATION_MACBINARY);
        types.put(APPLICATION_MAC_BINARY.toString(), APPLICATION_MAC_BINARY);
        types.put(APPLICATION_MAC_BINHEX.toString(), APPLICATION_MAC_BINHEX);
        types.put(APPLICATION_MAC_BINHEX40.toString(), APPLICATION_MAC_BINHEX40);
        types.put(APPLICATION_MAC_COMPACTPRO.toString(), APPLICATION_MAC_COMPACTPRO);
        types.put(APPLICATION_MARC.toString(), APPLICATION_MARC);
        types.put(APPLICATION_MBEDLET.toString(), APPLICATION_MBEDLET);
        types.put(APPLICATION_MCAD.toString(), APPLICATION_MCAD);
        types.put(APPLICATION_MIME.toString(), APPLICATION_MIME);
        types.put(APPLICATION_MSPOWERPOINT.toString(), APPLICATION_MSPOWERPOINT);
        types.put(APPLICATION_MSWORD.toString(), APPLICATION_MSWORD);
        types.put(APPLICATION_MSWRITE.toString(), APPLICATION_MSWRITE);
        types.put(APPLICATION_NETMC.toString(), APPLICATION_NETMC);
        types.put(APPLICATION_OCTET_STREAM.toString(), APPLICATION_OCTET_STREAM);
        types.put(APPLICATION_ODA.toString(), APPLICATION_ODA);
        types.put(APPLICATION_PDF.toString(), APPLICATION_PDF);
        types.put(APPLICATION_PKCS10.toString(), APPLICATION_PKCS10);
        types.put(APPLICATION_PKCS7_MIME.toString(), APPLICATION_PKCS7_MIME);
        types.put(APPLICATION_PKCS7_SIGNATURE.toString(), APPLICATION_PKCS7_SIGNATURE);
        types.put(APPLICATION_PKCS_12.toString(), APPLICATION_PKCS_12);
        types.put(APPLICATION_PKCS_CRL.toString(), APPLICATION_PKCS_CRL);
        types.put(APPLICATION_PKIX_CERT.toString(), APPLICATION_PKIX_CERT);
        types.put(APPLICATION_PKIX_CRL.toString(), APPLICATION_PKIX_CRL);
        types.put(APPLICATION_PLAIN.toString(), APPLICATION_PLAIN);
        types.put(APPLICATION_POSTSCRIPT.toString(), APPLICATION_POSTSCRIPT);
        types.put(APPLICATION_POWERPOINT.toString(), APPLICATION_POWERPOINT);
        types.put(APPLICATION_PRO_ENG.toString(), APPLICATION_PRO_ENG);
        types.put(APPLICATION_RINGING_TONES.toString(), APPLICATION_RINGING_TONES);
        types.put(APPLICATION_RTF.toString(), APPLICATION_RTF);
        types.put(APPLICATION_SDP.toString(), APPLICATION_SDP);
        types.put(APPLICATION_SEA.toString(), APPLICATION_SEA);
        types.put(APPLICATION_SET.toString(), APPLICATION_SET);
        types.put(APPLICATION_SLA.toString(), APPLICATION_SLA);
        types.put(APPLICATION_SMIL.toString(), APPLICATION_SMIL);
        types.put(APPLICATION_SOLIDS.toString(), APPLICATION_SOLIDS);
        types.put(APPLICATION_SOUNDER.toString(), APPLICATION_SOUNDER);
        types.put(APPLICATION_STEP.toString(), APPLICATION_STEP);
        types.put(APPLICATION_STREAMINGMEDIA.toString(), APPLICATION_STREAMINGMEDIA);
        types.put(APPLICATION_TOOLBOOK.toString(), APPLICATION_TOOLBOOK);
        types.put(APPLICATION_VDA.toString(), APPLICATION_VDA);
        types.put(APPLICATION_VND_FDF.toString(), APPLICATION_VND_FDF);
        types.put(APPLICATION_VND_HP_HPGL.toString(), APPLICATION_VND_HP_HPGL);
        types.put(APPLICATION_VND_HP_PCL.toString(), APPLICATION_VND_HP_PCL);
        types.put(APPLICATION_VND_MS_EXCEL.toString(), APPLICATION_VND_MS_EXCEL);
        types.put(APPLICATION_VND_MS_PKI_CERTSTORE.toString(), APPLICATION_VND_MS_PKI_CERTSTORE);
        types.put(APPLICATION_VND_MS_PKI_PKO.toString(), APPLICATION_VND_MS_PKI_PKO);
        types.put(APPLICATION_VND_MS_PKI_SECCAT.toString(), APPLICATION_VND_MS_PKI_SECCAT);
        types.put(APPLICATION_VND_MS_PKI_STL.toString(), APPLICATION_VND_MS_PKI_STL);
        types.put(APPLICATION_VND_MS_POWERPOINT.toString(), APPLICATION_VND_MS_POWERPOINT);
        types.put(APPLICATION_VND_MS_PROJECT.toString(), APPLICATION_VND_MS_PROJECT);
        types.put(APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE.toString(), APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE);
        types.put(APPLICATION_VND_NOKIA_RINGING_TONE.toString(), APPLICATION_VND_NOKIA_RINGING_TONE);
        types.put(APPLICATION_VND_RN_REALMEDIA.toString(), APPLICATION_VND_RN_REALMEDIA);
        types.put(APPLICATION_VND_RN_REALPLAYER.toString(), APPLICATION_VND_RN_REALPLAYER);
        types.put(APPLICATION_VND_WAP_WMLC.toString(), APPLICATION_VND_WAP_WMLC);
        types.put(APPLICATION_VND_WAP_WMLSCRIPTC.toString(), APPLICATION_VND_WAP_WMLSCRIPTC);
        types.put(APPLICATION_VND_XARA.toString(), APPLICATION_VND_XARA);
        types.put(APPLICATION_VOCALTEC_MEDIA_DESC.toString(), APPLICATION_VOCALTEC_MEDIA_DESC);
        types.put(APPLICATION_VOCALTEC_MEDIA_FILE.toString(), APPLICATION_VOCALTEC_MEDIA_FILE);
        types.put(APPLICATION_WORDPERFECT.toString(), APPLICATION_WORDPERFECT);
        types.put(APPLICATION_WORDPERFECT6_0.toString(), APPLICATION_WORDPERFECT6_0);
        types.put(APPLICATION_WORDPERFECT6_1.toString(), APPLICATION_WORDPERFECT6_1);
        types.put(APPLICATION_XML.toString(), APPLICATION_XML);
        types.put(APPLICATION_X_123.toString(), APPLICATION_X_123);
        types.put(APPLICATION_X_AIM.toString(), APPLICATION_X_AIM);
        types.put(APPLICATION_X_AUTHORWARE_BIN.toString(), APPLICATION_X_AUTHORWARE_BIN);
        types.put(APPLICATION_X_AUTHORWARE_MAP.toString(), APPLICATION_X_AUTHORWARE_MAP);
        types.put(APPLICATION_X_AUTHORWARE_SEG.toString(), APPLICATION_X_AUTHORWARE_SEG);
        types.put(APPLICATION_X_BCPIO.toString(), APPLICATION_X_BCPIO);
        types.put(APPLICATION_X_BINARY.toString(), APPLICATION_X_BINARY);
        types.put(APPLICATION_X_BINHEX40.toString(), APPLICATION_X_BINHEX40);
        types.put(APPLICATION_X_BSH.toString(), APPLICATION_X_BSH);
        types.put(APPLICATION_X_BYTECODE_ELISP.toString(), APPLICATION_X_BYTECODE_ELISP);
        types.put(APPLICATION_X_BYTECODE_PYTHON.toString(), APPLICATION_X_BYTECODE_PYTHON);
        types.put(APPLICATION_X_BZIP.toString(), APPLICATION_X_BZIP);
        types.put(APPLICATION_X_BZIP2.toString(), APPLICATION_X_BZIP2);
        types.put(APPLICATION_X_CDF.toString(), APPLICATION_X_CDF);
        types.put(APPLICATION_X_CDLINK.toString(), APPLICATION_X_CDLINK);
        types.put(APPLICATION_X_CHAT.toString(), APPLICATION_X_CHAT);
        types.put(APPLICATION_X_CMU_RASTER.toString(), APPLICATION_X_CMU_RASTER);
        types.put(APPLICATION_X_COCOA.toString(), APPLICATION_X_COCOA);
        types.put(APPLICATION_X_COMPACTPRO.toString(), APPLICATION_X_COMPACTPRO);
        types.put(APPLICATION_X_COMPRESS.toString(), APPLICATION_X_COMPRESS);
        types.put(APPLICATION_X_COMPRESSED.toString(), APPLICATION_X_COMPRESSED);
        types.put(APPLICATION_X_CONFERENCE.toString(), APPLICATION_X_CONFERENCE);
        types.put(APPLICATION_X_CPIO.toString(), APPLICATION_X_CPIO);
        types.put(APPLICATION_X_CPT.toString(), APPLICATION_X_CPT);
        types.put(APPLICATION_X_CSH.toString(), APPLICATION_X_CSH);
        types.put(APPLICATION_X_DEEPV.toString(), APPLICATION_X_DEEPV);
        types.put(APPLICATION_X_DIRECTOR.toString(), APPLICATION_X_DIRECTOR);
        types.put(APPLICATION_X_DVI.toString(), APPLICATION_X_DVI);
        types.put(APPLICATION_X_ELC.toString(), APPLICATION_X_ELC);
        types.put(APPLICATION_X_ENVOY.toString(), APPLICATION_X_ENVOY);
        types.put(APPLICATION_X_ESREHBER.toString(), APPLICATION_X_ESREHBER);
        types.put(APPLICATION_X_EXCEL.toString(), APPLICATION_X_EXCEL);
        types.put(APPLICATION_X_FRAME.toString(), APPLICATION_X_FRAME);
        types.put(APPLICATION_X_FREELANCE.toString(), APPLICATION_X_FREELANCE);
        types.put(APPLICATION_X_GSP.toString(), APPLICATION_X_GSP);
        types.put(APPLICATION_X_GSS.toString(), APPLICATION_X_GSS);
        types.put(APPLICATION_X_GTAR.toString(), APPLICATION_X_GTAR);
        types.put(APPLICATION_X_GZIP.toString(), APPLICATION_X_GZIP);
        types.put(APPLICATION_X_HDF.toString(), APPLICATION_X_HDF);
        types.put(APPLICATION_X_HELPFILE.toString(), APPLICATION_X_HELPFILE);
        types.put(APPLICATION_X_HTTPD_IMAP.toString(), APPLICATION_X_HTTPD_IMAP);
        types.put(APPLICATION_X_IMA.toString(), APPLICATION_X_IMA);
        types.put(APPLICATION_X_INTERNETT_SIGNUP.toString(), APPLICATION_X_INTERNETT_SIGNUP);
        types.put(APPLICATION_X_INVENTOR.toString(), APPLICATION_X_INVENTOR);
        types.put(APPLICATION_X_IP2.toString(), APPLICATION_X_IP2);
        types.put(APPLICATION_X_JAVASCRIPT.toString(), APPLICATION_X_JAVASCRIPT);
types.put(APPLICATION_X_JAVA_CLASS.toString(), APPLICATION_X_JAVA_CLASS); types.put(APPLICATION_X_JAVA_COMMERCE.toString(), APPLICATION_X_JAVA_COMMERCE); types.put(APPLICATION_X_KOAN.toString(), APPLICATION_X_KOAN); types.put(APPLICATION_X_KSH.toString(), APPLICATION_X_KSH); types.put(APPLICATION_X_LATEX.toString(), APPLICATION_X_LATEX); types.put(APPLICATION_X_LHA.toString(), APPLICATION_X_LHA); types.put(APPLICATION_X_LISP.toString(), APPLICATION_X_LISP); types.put(APPLICATION_X_LIVESCREEN.toString(), APPLICATION_X_LIVESCREEN); types.put(APPLICATION_X_LOTUS.toString(), APPLICATION_X_LOTUS); types.put(APPLICATION_X_LOTUSSCREENCAM.toString(), APPLICATION_X_LOTUSSCREENCAM); types.put(APPLICATION_X_LZH.toString(), APPLICATION_X_LZH); types.put(APPLICATION_X_LZX.toString(), APPLICATION_X_LZX); types.put(APPLICATION_X_MACBINARY.toString(), APPLICATION_X_MACBINARY); types.put(APPLICATION_X_MAC_BINHEX40.toString(), APPLICATION_X_MAC_BINHEX40); types.put(APPLICATION_X_MAGIC_CAP_PACKAGE_1_0.toString(), APPLICATION_X_MAGIC_CAP_PACKAGE_1_0); types.put(APPLICATION_X_MATHCAD.toString(), APPLICATION_X_MATHCAD); types.put(APPLICATION_X_MEME.toString(), APPLICATION_X_MEME); types.put(APPLICATION_X_MIDI.toString(), APPLICATION_X_MIDI); types.put(APPLICATION_X_MIF.toString(), APPLICATION_X_MIF); types.put(APPLICATION_X_MIX_TRANSFER.toString(), APPLICATION_X_MIX_TRANSFER); types.put(APPLICATION_X_MPLAYER2.toString(), APPLICATION_X_MPLAYER2); types.put(APPLICATION_X_MSEXCEL.toString(), APPLICATION_X_MSEXCEL); types.put(APPLICATION_X_MSPOWERPOINT.toString(), APPLICATION_X_MSPOWERPOINT); types.put(APPLICATION_X_NAVIDOC.toString(), APPLICATION_X_NAVIDOC); types.put(APPLICATION_X_NAVIMAP.toString(), APPLICATION_X_NAVIMAP); types.put(APPLICATION_X_NAVISTYLE.toString(), APPLICATION_X_NAVISTYLE); types.put(APPLICATION_X_NAVI_ANIMATION.toString(), APPLICATION_X_NAVI_ANIMATION); types.put(APPLICATION_X_NETCDF.toString(), APPLICATION_X_NETCDF); types.put(APPLICATION_X_NEWTON_COMPATIBLE_PKG.toString(), APPLICATION_X_NEWTON_COMPATIBLE_PKG); types.put(APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE.toString(), APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE); types.put(APPLICATION_X_OMC.toString(), APPLICATION_X_OMC); types.put(APPLICATION_X_OMCDATAMAKER.toString(), APPLICATION_X_OMCDATAMAKER); types.put(APPLICATION_X_OMCREGERATOR.toString(), APPLICATION_X_OMCREGERATOR); types.put(APPLICATION_X_PAGEMAKER.toString(), APPLICATION_X_PAGEMAKER); types.put(APPLICATION_X_PCL.toString(), APPLICATION_X_PCL); types.put(APPLICATION_X_PIXCLSCRIPT.toString(), APPLICATION_X_PIXCLSCRIPT); types.put(APPLICATION_X_PKCS10.toString(), APPLICATION_X_PKCS10); types.put(APPLICATION_X_PKCS12.toString(), APPLICATION_X_PKCS12); types.put(APPLICATION_X_PKCS7_CERTIFICATES.toString(), APPLICATION_X_PKCS7_CERTIFICATES); types.put(APPLICATION_X_PKCS7_CERTREQRESP.toString(), APPLICATION_X_PKCS7_CERTREQRESP); types.put(APPLICATION_X_PKCS7_MIME.toString(), APPLICATION_X_PKCS7_MIME); types.put(APPLICATION_X_PKCS7_SIGNATURE.toString(), APPLICATION_X_PKCS7_SIGNATURE); types.put(APPLICATION_X_POINTPLUS.toString(), APPLICATION_X_POINTPLUS); types.put(APPLICATION_X_PORTABLE_ANYMAP.toString(), APPLICATION_X_PORTABLE_ANYMAP); types.put(APPLICATION_X_PROJECT.toString(), APPLICATION_X_PROJECT); types.put(APPLICATION_X_QPRO.toString(), APPLICATION_X_QPRO); types.put(APPLICATION_X_RTF.toString(), APPLICATION_X_RTF); types.put(APPLICATION_X_SDP.toString(), APPLICATION_X_SDP); types.put(APPLICATION_X_SEA.toString(), APPLICATION_X_SEA); 
types.put(APPLICATION_X_SEELOGO.toString(), APPLICATION_X_SEELOGO); types.put(APPLICATION_X_SH.toString(), APPLICATION_X_SH); types.put(APPLICATION_X_SHAR.toString(), APPLICATION_X_SHAR); types.put(APPLICATION_X_SHOCKWAVE_FLASH.toString(), APPLICATION_X_SHOCKWAVE_FLASH); types.put(APPLICATION_X_SIT.toString(), APPLICATION_X_SIT); types.put(APPLICATION_X_SPRITE.toString(), APPLICATION_X_SPRITE); types.put(APPLICATION_X_STUFFIT.toString(), APPLICATION_X_STUFFIT); types.put(APPLICATION_X_SV4CPIO.toString(), APPLICATION_X_SV4CPIO); types.put(APPLICATION_X_SV4CRC.toString(), APPLICATION_X_SV4CRC); types.put(APPLICATION_X_TAR.toString(), APPLICATION_X_TAR); types.put(APPLICATION_X_TBOOK.toString(), APPLICATION_X_TBOOK); types.put(APPLICATION_X_TCL.toString(), APPLICATION_X_TCL); types.put(APPLICATION_X_TEX.toString(), APPLICATION_X_TEX); types.put(APPLICATION_X_TEXINFO.toString(), APPLICATION_X_TEXINFO); types.put(APPLICATION_X_TROFF.toString(), APPLICATION_X_TROFF); types.put(APPLICATION_X_TROFF_MAN.toString(), APPLICATION_X_TROFF_MAN); types.put(APPLICATION_X_TROFF_ME.toString(), APPLICATION_X_TROFF_ME); types.put(APPLICATION_X_TROFF_MS.toString(), APPLICATION_X_TROFF_MS); types.put(APPLICATION_X_TROFF_MSVIDEO.toString(), APPLICATION_X_TROFF_MSVIDEO); types.put(APPLICATION_X_USTAR.toString(), APPLICATION_X_USTAR); types.put(APPLICATION_X_VISIO.toString(), APPLICATION_X_VISIO); types.put(APPLICATION_X_VND_AUDIOEXPLOSION_MZZ.toString(), APPLICATION_X_VND_AUDIOEXPLOSION_MZZ); types.put(APPLICATION_X_VND_LS_XPIX.toString(), APPLICATION_X_VND_LS_XPIX); types.put(APPLICATION_X_VRML.toString(), APPLICATION_X_VRML); types.put(APPLICATION_X_WAIS_SOURCE.toString(), APPLICATION_X_WAIS_SOURCE); types.put(APPLICATION_X_WINHELP.toString(), APPLICATION_X_WINHELP); types.put(APPLICATION_X_WINTALK.toString(), APPLICATION_X_WINTALK); types.put(APPLICATION_X_WORLD.toString(), APPLICATION_X_WORLD); types.put(APPLICATION_X_WPWIN.toString(), APPLICATION_X_WPWIN); types.put(APPLICATION_X_WRI.toString(), APPLICATION_X_WRI); types.put(APPLICATION_X_X509_CA_CERT.toString(), APPLICATION_X_X509_CA_CERT); types.put(APPLICATION_X_X509_USER_CERT.toString(), APPLICATION_X_X509_USER_CERT); types.put(APPLICATION_X_ZIP_COMPRESSED.toString(), APPLICATION_X_ZIP_COMPRESSED); types.put(APPLICATION_ZIP.toString(), APPLICATION_ZIP); types.put(AUDIO_AIFF.toString(), AUDIO_AIFF); types.put(AUDIO_BASIC.toString(), AUDIO_BASIC); types.put(AUDIO_IT.toString(), AUDIO_IT); types.put(AUDIO_MAKE.toString(), AUDIO_MAKE); types.put(AUDIO_MAKE_MY_FUNK.toString(), AUDIO_MAKE_MY_FUNK); types.put(AUDIO_MID.toString(), AUDIO_MID); types.put(AUDIO_MIDI.toString(), AUDIO_MIDI); types.put(AUDIO_MOD.toString(), AUDIO_MOD); types.put(AUDIO_MPEG.toString(), AUDIO_MPEG); types.put(AUDIO_MPEG3.toString(), AUDIO_MPEG3); types.put(AUDIO_NSPAUDIO.toString(), AUDIO_NSPAUDIO); types.put(AUDIO_S3M.toString(), AUDIO_S3M); types.put(AUDIO_TSPLAYER.toString(), AUDIO_TSPLAYER); types.put(AUDIO_TSP_AUDIO.toString(), AUDIO_TSP_AUDIO); types.put(AUDIO_VND_QCELP.toString(), AUDIO_VND_QCELP); types.put(AUDIO_VOC.toString(), AUDIO_VOC); types.put(AUDIO_VOXWARE.toString(), AUDIO_VOXWARE); types.put(AUDIO_WAV.toString(), AUDIO_WAV); types.put(AUDIO_XM.toString(), AUDIO_XM); types.put(AUDIO_X_ADPCM.toString(), AUDIO_X_ADPCM); types.put(AUDIO_X_AIFF.toString(), AUDIO_X_AIFF); types.put(AUDIO_X_AU.toString(), AUDIO_X_AU); types.put(AUDIO_X_GSM.toString(), AUDIO_X_GSM); types.put(AUDIO_X_JAM.toString(), AUDIO_X_JAM); types.put(AUDIO_X_LIVEAUDIO.toString(), AUDIO_X_LIVEAUDIO); 
types.put(AUDIO_X_MID.toString(), AUDIO_X_MID); types.put(AUDIO_X_MIDI.toString(), AUDIO_X_MIDI); types.put(AUDIO_X_MOD.toString(), AUDIO_X_MOD); types.put(AUDIO_X_MPEG.toString(), AUDIO_X_MPEG); types.put(AUDIO_X_MPEG_3.toString(), AUDIO_X_MPEG_3); types.put(AUDIO_X_MPEQURL.toString(), AUDIO_X_MPEQURL); types.put(AUDIO_X_NSPAUDIO.toString(), AUDIO_X_NSPAUDIO); types.put(AUDIO_X_PN_REALAUDIO.toString(), AUDIO_X_PN_REALAUDIO); types.put(AUDIO_X_PN_REALAUDIO_PLUGIN.toString(), AUDIO_X_PN_REALAUDIO_PLUGIN); types.put(AUDIO_X_PSID.toString(), AUDIO_X_PSID); types.put(AUDIO_X_REALAUDIO.toString(), AUDIO_X_REALAUDIO); types.put(AUDIO_X_TWINVQ.toString(), AUDIO_X_TWINVQ); types.put(AUDIO_X_TWINVQ_PLUGIN.toString(), AUDIO_X_TWINVQ_PLUGIN); types.put(AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE.toString(), AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE); types.put(AUDIO_X_VOC.toString(), AUDIO_X_VOC); types.put(AUDIO_X_WAV.toString(), AUDIO_X_WAV); types.put(CHEMICAL_X_PDB.toString(), CHEMICAL_X_PDB); types.put(DRAWING_X_DWF.toString(), DRAWING_X_DWF); types.put(IMAGE_BMP.toString(), IMAGE_BMP); types.put(IMAGE_CMU_RASTER.toString(), IMAGE_CMU_RASTER); types.put(IMAGE_FIF.toString(), IMAGE_FIF); types.put(IMAGE_FLORIAN.toString(), IMAGE_FLORIAN); types.put(IMAGE_G3FAX.toString(), IMAGE_G3FAX); types.put(IMAGE_GIF.toString(), IMAGE_GIF); types.put(IMAGE_IEF.toString(), IMAGE_IEF); types.put(IMAGE_JPEG.toString(), IMAGE_JPEG); types.put(IMAGE_JUTVISION.toString(), IMAGE_JUTVISION); types.put(IMAGE_NAPLPS.toString(), IMAGE_NAPLPS); types.put(IMAGE_PICT.toString(), IMAGE_PICT); types.put(IMAGE_PJPEG.toString(), IMAGE_PJPEG); types.put(IMAGE_PNG.toString(), IMAGE_PNG); types.put(IMAGE_TIFF.toString(), IMAGE_TIFF); types.put(IMAGE_VASA.toString(), IMAGE_VASA); types.put(IMAGE_VND_DWG.toString(), IMAGE_VND_DWG); types.put(IMAGE_VND_FPX.toString(), IMAGE_VND_FPX); types.put(IMAGE_VND_NET_FPX.toString(), IMAGE_VND_NET_FPX); types.put(IMAGE_VND_RN_REALFLASH.toString(), IMAGE_VND_RN_REALFLASH); types.put(IMAGE_VND_RN_REALPIX.toString(), IMAGE_VND_RN_REALPIX); types.put(IMAGE_VND_WAP_WBMP.toString(), IMAGE_VND_WAP_WBMP); types.put(IMAGE_VND_XIFF.toString(), IMAGE_VND_XIFF); types.put(IMAGE_XBM.toString(), IMAGE_XBM); types.put(IMAGE_XPM.toString(), IMAGE_XPM); types.put(IMAGE_X_CMU_RASTER.toString(), IMAGE_X_CMU_RASTER); types.put(IMAGE_X_DWG.toString(), IMAGE_X_DWG); types.put(IMAGE_X_ICON.toString(), IMAGE_X_ICON); types.put(IMAGE_X_JG.toString(), IMAGE_X_JG); types.put(IMAGE_X_JPS.toString(), IMAGE_X_JPS); types.put(IMAGE_X_NIFF.toString(), IMAGE_X_NIFF); types.put(IMAGE_X_PCX.toString(), IMAGE_X_PCX); types.put(IMAGE_X_PICT.toString(), IMAGE_X_PICT); types.put(IMAGE_X_PORTABLE_ANYMAP.toString(), IMAGE_X_PORTABLE_ANYMAP); types.put(IMAGE_X_PORTABLE_BITMAP.toString(), IMAGE_X_PORTABLE_BITMAP); types.put(IMAGE_X_PORTABLE_GRAYMAP.toString(), IMAGE_X_PORTABLE_GRAYMAP); types.put(IMAGE_X_PORTABLE_GREYMAP.toString(), IMAGE_X_PORTABLE_GREYMAP); types.put(IMAGE_X_PORTABLE_PIXMAP.toString(), IMAGE_X_PORTABLE_PIXMAP); types.put(IMAGE_X_QUICKTIME.toString(), IMAGE_X_QUICKTIME); types.put(IMAGE_X_RGB.toString(), IMAGE_X_RGB); types.put(IMAGE_X_TIFF.toString(), IMAGE_X_TIFF); types.put(IMAGE_X_WINDOWS_BMP.toString(), IMAGE_X_WINDOWS_BMP); types.put(IMAGE_X_XBITMAP.toString(), IMAGE_X_XBITMAP); types.put(IMAGE_X_XBM.toString(), IMAGE_X_XBM); types.put(IMAGE_X_XPIXMAP.toString(), IMAGE_X_XPIXMAP); types.put(IMAGE_X_XWD.toString(), IMAGE_X_XWD); types.put(IMAGE_X_XWINDOWDUMP.toString(), IMAGE_X_XWINDOWDUMP); 
types.put(I_WORLD_I_VRML.toString(), I_WORLD_I_VRML); types.put(MESSAGE_RFC822.toString(), MESSAGE_RFC822); types.put(MODEL_IGES.toString(), MODEL_IGES); types.put(MODEL_VND_DWF.toString(), MODEL_VND_DWF); types.put(MODEL_VRML.toString(), MODEL_VRML); types.put(MODEL_X_POV.toString(), MODEL_X_POV); types.put(MULTIPART_X_GZIP.toString(), MULTIPART_X_GZIP); types.put(MULTIPART_X_USTAR.toString(), MULTIPART_X_USTAR); types.put(MULTIPART_X_ZIP.toString(), MULTIPART_X_ZIP); types.put(MUSIC_CRESCENDO.toString(), MUSIC_CRESCENDO); types.put(MUSIC_X_KARAOKE.toString(), MUSIC_X_KARAOKE); types.put(PALEOVU_X_PV.toString(), PALEOVU_X_PV); types.put(TEXT_ASP.toString(), TEXT_ASP); types.put(TEXT_CSS.toString(), TEXT_CSS); types.put(TEXT_ECMASCRIPT.toString(), TEXT_ECMASCRIPT); types.put(TEXT_HTML.toString(), TEXT_HTML); types.put(TEXT_JAVASCRIPT.toString(), TEXT_JAVASCRIPT); types.put(TEXT_MCF.toString(), TEXT_MCF); types.put(TEXT_PASCAL.toString(), TEXT_PASCAL); types.put(TEXT_PLAIN.toString(), TEXT_PLAIN); types.put(TEXT_RICHTEXT.toString(), TEXT_RICHTEXT); types.put(TEXT_SCRIPLET.toString(), TEXT_SCRIPLET); types.put(TEXT_SGML.toString(), TEXT_SGML); types.put(TEXT_TAB_SEPARATED_VALUES.toString(), TEXT_TAB_SEPARATED_VALUES); types.put(TEXT_URI_LIST.toString(), TEXT_URI_LIST); types.put(TEXT_VND_ABC.toString(), TEXT_VND_ABC); types.put(TEXT_VND_FMI_FLEXSTOR.toString(), TEXT_VND_FMI_FLEXSTOR); types.put(TEXT_VND_RN_REALTEXT.toString(), TEXT_VND_RN_REALTEXT); types.put(TEXT_VND_WAP_WML.toString(), TEXT_VND_WAP_WML); types.put(TEXT_VND_WAP_WMLSCRIPT.toString(), TEXT_VND_WAP_WMLSCRIPT); types.put(TEXT_WEBVIEWHTML.toString(), TEXT_WEBVIEWHTML); types.put(TEXT_XML.toString(), TEXT_XML); types.put(TEXT_X_ASM.toString(), TEXT_X_ASM); types.put(TEXT_X_AUDIOSOFT_INTRA.toString(), TEXT_X_AUDIOSOFT_INTRA); types.put(TEXT_X_C.toString(), TEXT_X_C); types.put(TEXT_X_COMPONENT.toString(), TEXT_X_COMPONENT); types.put(TEXT_X_FORTRAN.toString(), TEXT_X_FORTRAN); types.put(TEXT_X_H.toString(), TEXT_X_H); types.put(TEXT_X_JAVA_SOURCE.toString(), TEXT_X_JAVA_SOURCE); types.put(TEXT_X_LA_ASF.toString(), TEXT_X_LA_ASF); types.put(TEXT_X_M.toString(), TEXT_X_M); types.put(TEXT_X_PASCAL.toString(), TEXT_X_PASCAL); types.put(TEXT_X_SCRIPT.toString(), TEXT_X_SCRIPT); types.put(TEXT_X_SCRIPT_CSH.toString(), TEXT_X_SCRIPT_CSH); types.put(TEXT_X_SCRIPT_ELISP.toString(), TEXT_X_SCRIPT_ELISP); types.put(TEXT_X_SCRIPT_GUILE.toString(), TEXT_X_SCRIPT_GUILE); types.put(TEXT_X_SCRIPT_KSH.toString(), TEXT_X_SCRIPT_KSH); types.put(TEXT_X_SCRIPT_LISP.toString(), TEXT_X_SCRIPT_LISP); types.put(TEXT_X_SCRIPT_PERL.toString(), TEXT_X_SCRIPT_PERL); types.put(TEXT_X_SCRIPT_PERL_MODULE.toString(), TEXT_X_SCRIPT_PERL_MODULE); types.put(TEXT_X_SCRIPT_PHYTON.toString(), TEXT_X_SCRIPT_PHYTON); types.put(TEXT_X_SCRIPT_REXX.toString(), TEXT_X_SCRIPT_REXX); types.put(TEXT_X_SCRIPT_SCHEME.toString(), TEXT_X_SCRIPT_SCHEME); types.put(TEXT_X_SCRIPT_SH.toString(), TEXT_X_SCRIPT_SH); types.put(TEXT_X_SCRIPT_TCL.toString(), TEXT_X_SCRIPT_TCL); types.put(TEXT_X_SCRIPT_TCSH.toString(), TEXT_X_SCRIPT_TCSH); types.put(TEXT_X_SCRIPT_ZSH.toString(), TEXT_X_SCRIPT_ZSH); types.put(TEXT_X_SERVER_PARSED_HTML.toString(), TEXT_X_SERVER_PARSED_HTML); types.put(TEXT_X_SETEXT.toString(), TEXT_X_SETEXT); types.put(TEXT_X_SGML.toString(), TEXT_X_SGML); types.put(TEXT_X_SPEECH.toString(), TEXT_X_SPEECH); types.put(TEXT_X_UIL.toString(), TEXT_X_UIL); types.put(TEXT_X_UUENCODE.toString(), TEXT_X_UUENCODE); types.put(TEXT_X_VCALENDAR.toString(), TEXT_X_VCALENDAR); 
types.put(VIDEO_ANIMAFLEX.toString(), VIDEO_ANIMAFLEX); types.put(VIDEO_AVI.toString(), VIDEO_AVI); types.put(VIDEO_AVS_VIDEO.toString(), VIDEO_AVS_VIDEO); types.put(VIDEO_DL.toString(), VIDEO_DL); types.put(VIDEO_FLI.toString(), VIDEO_FLI); types.put(VIDEO_GL.toString(), VIDEO_GL); types.put(VIDEO_MPEG.toString(), VIDEO_MPEG); types.put(VIDEO_MSVIDEO.toString(), VIDEO_MSVIDEO); types.put(VIDEO_QUICKTIME.toString(), VIDEO_QUICKTIME); types.put(VIDEO_VDO.toString(), VIDEO_VDO); types.put(VIDEO_VIVO.toString(), VIDEO_VIVO); types.put(VIDEO_VND_RN_REALVIDEO.toString(), VIDEO_VND_RN_REALVIDEO); types.put(VIDEO_VND_VIVO.toString(), VIDEO_VND_VIVO); types.put(VIDEO_VOSAIC.toString(), VIDEO_VOSAIC); types.put(VIDEO_X_AMT_DEMORUN.toString(), VIDEO_X_AMT_DEMORUN); types.put(VIDEO_X_AMT_SHOWRUN.toString(), VIDEO_X_AMT_SHOWRUN); types.put(VIDEO_X_ATOMIC3D_FEATURE.toString(), VIDEO_X_ATOMIC3D_FEATURE); types.put(VIDEO_X_DL.toString(), VIDEO_X_DL); types.put(VIDEO_X_DV.toString(), VIDEO_X_DV); types.put(VIDEO_X_FLI.toString(), VIDEO_X_FLI); types.put(VIDEO_X_GL.toString(), VIDEO_X_GL); types.put(VIDEO_X_ISVIDEO.toString(), VIDEO_X_ISVIDEO); types.put(VIDEO_X_MOTION_JPEG.toString(), VIDEO_X_MOTION_JPEG); types.put(VIDEO_X_MPEG.toString(), VIDEO_X_MPEG); types.put(VIDEO_X_MPEQ2A.toString(), VIDEO_X_MPEQ2A); types.put(VIDEO_X_MSVIDEO.toString(), VIDEO_X_MSVIDEO); types.put(VIDEO_X_MS_ASF.toString(), VIDEO_X_MS_ASF); types.put(VIDEO_X_MS_ASF_PLUGIN.toString(), VIDEO_X_MS_ASF_PLUGIN); types.put(VIDEO_X_QTC.toString(), VIDEO_X_QTC); types.put(VIDEO_X_SCM.toString(), VIDEO_X_SCM); types.put(VIDEO_X_SGI_MOVIE.toString(), VIDEO_X_SGI_MOVIE); types.put(WINDOWS_METAFILE.toString(), WINDOWS_METAFILE); types.put(WWW_MIME.toString(), WWW_MIME); types.put(XGL_DRAWING.toString(), XGL_DRAWING); types.put(XGL_MOVIE.toString(), XGL_MOVIE); types.put(X_CONFERENCE_X_COOLTALK.toString(), X_CONFERENCE_X_COOLTALK); types.put(X_MUSIC_X_MIDI.toString(), X_MUSIC_X_MIDI); types.put(X_WORLD_X_3DMF.toString(), X_WORLD_X_3DMF); types.put(X_WORLD_X_SVR.toString(), X_WORLD_X_SVR); types.put(X_WORLD_X_VRML.toString(), X_WORLD_X_VRML); types.put(X_WORLD_X_VRT.toString(), X_WORLD_X_VRT); typesBySuffix.put(_3DM, X_WORLD_X_3DMF); typesBySuffix.put(A, APPLICATION_OCTET_STREAM); typesBySuffix.put(AAB, APPLICATION_X_AUTHORWARE_BIN); typesBySuffix.put(AAM, APPLICATION_X_AUTHORWARE_MAP); typesBySuffix.put(AAS, APPLICATION_X_AUTHORWARE_SEG); typesBySuffix.put(ABC, TEXT_VND_ABC); typesBySuffix.put(ACGI, TEXT_HTML); typesBySuffix.put(AFL, VIDEO_ANIMAFLEX); typesBySuffix.put(AI, APPLICATION_POSTSCRIPT); typesBySuffix.put(AIF, AUDIO_AIFF); typesBySuffix.put(AIM, APPLICATION_X_AIM); typesBySuffix.put(AIP, TEXT_X_AUDIOSOFT_INTRA); typesBySuffix.put(ANI, APPLICATION_X_NAVI_ANIMATION); typesBySuffix.put(AOS, APPLICATION_X_NOKIA_9000_COMMUNICATOR_ADD_ON_SOFTWARE); typesBySuffix.put(APS, APPLICATION_MIME); typesBySuffix.put(ARJ, APPLICATION_ARJ); typesBySuffix.put(ART, IMAGE_X_JG); typesBySuffix.put(ASF, VIDEO_X_MS_ASF); typesBySuffix.put(ASM, TEXT_X_ASM); typesBySuffix.put(ASP, TEXT_ASP); typesBySuffix.put(ASX, APPLICATION_X_MPLAYER2); typesBySuffix.put(AU, AUDIO_BASIC); typesBySuffix.put(AVI, APPLICATION_X_TROFF_MSVIDEO); typesBySuffix.put(AVS, VIDEO_AVS_VIDEO); typesBySuffix.put(BCPIO, APPLICATION_X_BCPIO); typesBySuffix.put(BIN, APPLICATION_MAC_BINARY); typesBySuffix.put(BM, IMAGE_BMP); typesBySuffix.put(BOO, APPLICATION_BOOK); typesBySuffix.put(BOZ, APPLICATION_X_BZIP2); typesBySuffix.put(BSH, APPLICATION_X_BSH); typesBySuffix.put(BZ, 
APPLICATION_X_BZIP); typesBySuffix.put(C, TEXT_PLAIN); typesBySuffix.put(CAT, APPLICATION_VND_MS_PKI_SECCAT); typesBySuffix.put(CCAD, APPLICATION_CLARISCAD); typesBySuffix.put(CCO, APPLICATION_X_COCOA); typesBySuffix.put(CDF, APPLICATION_CDF); typesBySuffix.put(CER, APPLICATION_PKIX_CERT); typesBySuffix.put(CHA, APPLICATION_X_CHAT); typesBySuffix.put(CLASS, APPLICATION_JAVA); typesBySuffix.put(CPIO, APPLICATION_X_CPIO); typesBySuffix.put(CPT, APPLICATION_MAC_COMPACTPRO); typesBySuffix.put(CRL, APPLICATION_PKCS_CRL); typesBySuffix.put(CSH, APPLICATION_X_CSH); typesBySuffix.put(CSS, TEXT_CSS); typesBySuffix.put(DCR, APPLICATION_X_DIRECTOR); typesBySuffix.put(DEEPV, APPLICATION_X_DEEPV); typesBySuffix.put(DIF, VIDEO_X_DV); typesBySuffix.put(DL, VIDEO_DL); typesBySuffix.put(DOC, APPLICATION_MSWORD); typesBySuffix.put(DP, APPLICATION_COMMONGROUND); typesBySuffix.put(DRW, APPLICATION_DRAFTING); typesBySuffix.put(DVI, APPLICATION_X_DVI); typesBySuffix.put(DWF, DRAWING_X_DWF); typesBySuffix.put(DWG, APPLICATION_ACAD); typesBySuffix.put(DXF, APPLICATION_DXF); typesBySuffix.put(EL, TEXT_X_SCRIPT_ELISP); typesBySuffix.put(ELC, APPLICATION_X_BYTECODE_ELISP); typesBySuffix.put(ENV, APPLICATION_X_ENVOY); typesBySuffix.put(ES, APPLICATION_X_ESREHBER); typesBySuffix.put(ETX, TEXT_X_SETEXT); typesBySuffix.put(EVY, APPLICATION_ENVOY); typesBySuffix.put(FDF, APPLICATION_VND_FDF); typesBySuffix.put(FIF, APPLICATION_FRACTALS); typesBySuffix.put(FLI, VIDEO_FLI); typesBySuffix.put(FLO, IMAGE_FLORIAN); typesBySuffix.put(FLX, TEXT_VND_FMI_FLEXSTOR); typesBySuffix.put(FMF, VIDEO_X_ATOMIC3D_FEATURE); typesBySuffix.put(FPX, IMAGE_VND_FPX); typesBySuffix.put(FRL, APPLICATION_FREELOADER); typesBySuffix.put(FUNK, AUDIO_MAKE); typesBySuffix.put(G3, IMAGE_G3FAX); typesBySuffix.put(GIF, IMAGE_GIF); typesBySuffix.put(GL, VIDEO_GL); typesBySuffix.put(GSD, AUDIO_X_GSM); typesBySuffix.put(GSP, APPLICATION_X_GSP); typesBySuffix.put(GSS, APPLICATION_X_GSS); typesBySuffix.put(GTAR, APPLICATION_X_GTAR); typesBySuffix.put(GZ, APPLICATION_X_COMPRESSED); typesBySuffix.put(HDF, APPLICATION_X_HDF); typesBySuffix.put(HELP, APPLICATION_X_HELPFILE); typesBySuffix.put(HGL, APPLICATION_VND_HP_HPGL); typesBySuffix.put(HLB, TEXT_X_SCRIPT); typesBySuffix.put(HLP, APPLICATION_HLP); typesBySuffix.put(HQX, APPLICATION_BINHEX); typesBySuffix.put(HTA, APPLICATION_HTA); typesBySuffix.put(HTC, TEXT_X_COMPONENT); typesBySuffix.put(HTT, TEXT_WEBVIEWHTML); typesBySuffix.put(ICE, X_CONFERENCE_X_COOLTALK); typesBySuffix.put(ICO, IMAGE_X_ICON); typesBySuffix.put(IEF, IMAGE_IEF); typesBySuffix.put(IGES, APPLICATION_IGES); typesBySuffix.put(IMA, APPLICATION_X_IMA); typesBySuffix.put(IMAP, APPLICATION_X_HTTPD_IMAP); typesBySuffix.put(INF, APPLICATION_INF); typesBySuffix.put(INS, APPLICATION_X_INTERNETT_SIGNUP); typesBySuffix.put(IP, APPLICATION_X_IP2); typesBySuffix.put(ISU, VIDEO_X_ISVIDEO); typesBySuffix.put(IT, AUDIO_IT); typesBySuffix.put(IV, APPLICATION_X_INVENTOR); typesBySuffix.put(IVR, I_WORLD_I_VRML); typesBySuffix.put(IVY, APPLICATION_X_LIVESCREEN); typesBySuffix.put(JAVA, APPLICATION_JAVA); typesBySuffix.put(JAM, AUDIO_X_JAM); typesBySuffix.put(JCM, APPLICATION_X_JAVA_COMMERCE); typesBySuffix.put(JFIF, IMAGE_JPEG); typesBySuffix.put(JPS, IMAGE_X_JPS); typesBySuffix.put(JPEG, IMAGE_JPEG); typesBySuffix.put(JS, APPLICATION_X_JAVASCRIPT); typesBySuffix.put(JUT, IMAGE_JUTVISION); typesBySuffix.put(JSON, APPLICATION_JSON); typesBySuffix.put(KAR, AUDIO_MIDI); typesBySuffix.put(KSH, APPLICATION_X_KSH); typesBySuffix.put(LA, AUDIO_NSPAUDIO); 
typesBySuffix.put(LAM, AUDIO_X_LIVEAUDIO); typesBySuffix.put(LATEX, APPLICATION_X_LATEX); typesBySuffix.put(LHA, APPLICATION_LHA); typesBySuffix.put(LSP, APPLICATION_X_LISP); typesBySuffix.put(LSX, TEXT_X_LA_ASF); typesBySuffix.put(LZX, APPLICATION_LZX); typesBySuffix.put(M1V, VIDEO_MPEG); typesBySuffix.put(M2A, AUDIO_MPEG); typesBySuffix.put(M3U, AUDIO_X_MPEQURL); typesBySuffix.put(MAN, APPLICATION_X_TROFF_MAN); typesBySuffix.put(MAP, APPLICATION_X_NAVIMAP); typesBySuffix.put(MBD, APPLICATION_MBEDLET); typesBySuffix.put(MC$, APPLICATION_X_MAGIC_CAP_PACKAGE_1_0); typesBySuffix.put(MCD, APPLICATION_MCAD); typesBySuffix.put(MCF, IMAGE_VASA); typesBySuffix.put(MCP, APPLICATION_NETMC); typesBySuffix.put(ME, APPLICATION_X_TROFF_ME); typesBySuffix.put(MHT, MESSAGE_RFC822); typesBySuffix.put(MID, APPLICATION_X_MIDI); typesBySuffix.put(MIF, APPLICATION_X_FRAME); typesBySuffix.put(MJF, AUDIO_X_VND_AUDIOEXPLOSION_MJUICEMEDIAFILE); typesBySuffix.put(MJPG, VIDEO_X_MOTION_JPEG); typesBySuffix.put(MM, APPLICATION_BASE64); typesBySuffix.put(MOD, AUDIO_MOD); typesBySuffix.put(MOOV, VIDEO_QUICKTIME); typesBySuffix.put(MOVIE, VIDEO_X_SGI_MOVIE); typesBySuffix.put(MP3, AUDIO_MPEG3); typesBySuffix.put(MPC, APPLICATION_X_PROJECT); typesBySuffix.put(MPP, APPLICATION_VND_MS_PROJECT); typesBySuffix.put(MRC, APPLICATION_MARC); typesBySuffix.put(MS, APPLICATION_X_TROFF_MS); typesBySuffix.put(MZZ, APPLICATION_X_VND_AUDIOEXPLOSION_MZZ); typesBySuffix.put(NAP, IMAGE_NAPLPS); typesBySuffix.put(NCM, APPLICATION_VND_NOKIA_CONFIGURATION_MESSAGE); typesBySuffix.put(NIF, IMAGE_X_NIFF); typesBySuffix.put(NIX, APPLICATION_X_MIX_TRANSFER); typesBySuffix.put(NSC, APPLICATION_X_CONFERENCE); typesBySuffix.put(NVD, APPLICATION_X_NAVIDOC); typesBySuffix.put(ODA, APPLICATION_ODA); typesBySuffix.put(OMC, APPLICATION_X_OMC); typesBySuffix.put(OMCD, APPLICATION_X_OMCDATAMAKER); typesBySuffix.put(OMCR, APPLICATION_X_OMCREGERATOR); typesBySuffix.put(P, TEXT_X_PASCAL); typesBySuffix.put(P10, APPLICATION_PKCS10); typesBySuffix.put(P12, APPLICATION_PKCS_12); typesBySuffix.put(P7A, APPLICATION_X_PKCS7_SIGNATURE); typesBySuffix.put(P7C, APPLICATION_PKCS7_MIME); typesBySuffix.put(P7R, APPLICATION_X_PKCS7_CERTREQRESP); typesBySuffix.put(P7S, APPLICATION_PKCS7_SIGNATURE); typesBySuffix.put(PART, APPLICATION_PRO_ENG); typesBySuffix.put(PAS, TEXT_PASCAL); typesBySuffix.put(PBM, IMAGE_X_PORTABLE_BITMAP); typesBySuffix.put(PCL, APPLICATION_VND_HP_PCL); typesBySuffix.put(PCT, IMAGE_X_PICT); typesBySuffix.put(PCX, IMAGE_X_PCX); typesBySuffix.put(PDB, CHEMICAL_X_PDB); typesBySuffix.put(PDF, APPLICATION_PDF); typesBySuffix.put(PGM, IMAGE_X_PORTABLE_GRAYMAP); typesBySuffix.put(PIC, IMAGE_PICT); typesBySuffix.put(PKG, APPLICATION_X_NEWTON_COMPATIBLE_PKG); typesBySuffix.put(PKO, APPLICATION_VND_MS_PKI_PKO); typesBySuffix.put(PLX, APPLICATION_X_PIXCLSCRIPT); typesBySuffix.put(PM, IMAGE_X_XPIXMAP); typesBySuffix.put(PM4, APPLICATION_X_PAGEMAKER); typesBySuffix.put(PNG, IMAGE_PNG); typesBySuffix.put(PNM, APPLICATION_X_PORTABLE_ANYMAP); typesBySuffix.put(POT, APPLICATION_MSPOWERPOINT); typesBySuffix.put(POV, MODEL_X_POV); typesBySuffix.put(PPM, IMAGE_X_PORTABLE_PIXMAP); typesBySuffix.put(PRE, APPLICATION_X_FREELANCE); typesBySuffix.put(PVU, PALEOVU_X_PV); typesBySuffix.put(PY, TEXT_X_SCRIPT_PHYTON); typesBySuffix.put(PYC, APPLICATION_X_BYTECODE_PYTHON); typesBySuffix.put(QCP, AUDIO_VND_QCELP); typesBySuffix.put(QIF, IMAGE_X_QUICKTIME); typesBySuffix.put(QTC, VIDEO_X_QTC); typesBySuffix.put(RA, AUDIO_X_PN_REALAUDIO); typesBySuffix.put(RAS, 
APPLICATION_X_CMU_RASTER); typesBySuffix.put(REXX, TEXT_X_SCRIPT_REXX); typesBySuffix.put(RF, IMAGE_VND_RN_REALFLASH); typesBySuffix.put(RGB, IMAGE_X_RGB); typesBySuffix.put(RM, APPLICATION_VND_RN_REALMEDIA); typesBySuffix.put(RMI, AUDIO_MID); typesBySuffix.put(RNG, APPLICATION_RINGING_TONES); typesBySuffix.put(RNX, APPLICATION_VND_RN_REALPLAYER); typesBySuffix.put(ROFF, APPLICATION_X_TROFF); typesBySuffix.put(RP, IMAGE_VND_RN_REALPIX); typesBySuffix.put(RT, TEXT_RICHTEXT); typesBySuffix.put(RTF, APPLICATION_RTF); typesBySuffix.put(RV, VIDEO_VND_RN_REALVIDEO); typesBySuffix.put(S3M, AUDIO_S3M); typesBySuffix.put(SBK, APPLICATION_X_TBOOK); typesBySuffix.put(SCM, APPLICATION_X_LOTUSSCREENCAM); typesBySuffix.put(SDP, APPLICATION_SDP); typesBySuffix.put(SDR, APPLICATION_SOUNDER); typesBySuffix.put(SEA, APPLICATION_SEA); typesBySuffix.put(SET, APPLICATION_SET); typesBySuffix.put(SGM, TEXT_SGML); typesBySuffix.put(SID, AUDIO_X_PSID); typesBySuffix.put(SIT, APPLICATION_X_SIT); typesBySuffix.put(SKD, APPLICATION_X_KOAN); typesBySuffix.put(SL, APPLICATION_X_SEELOGO); typesBySuffix.put(SMI, APPLICATION_SMIL); typesBySuffix.put(SOL, APPLICATION_SOLIDS); typesBySuffix.put(SPC, APPLICATION_X_PKCS7_CERTIFICATES); typesBySuffix.put(SPL, APPLICATION_FUTURESPLASH); typesBySuffix.put(SPR, APPLICATION_X_SPRITE); typesBySuffix.put(SRC, APPLICATION_X_WAIS_SOURCE); typesBySuffix.put(SSM, APPLICATION_STREAMINGMEDIA); typesBySuffix.put(SST, APPLICATION_VND_MS_PKI_CERTSTORE); typesBySuffix.put(STEP, APPLICATION_STEP); typesBySuffix.put(STL, APPLICATION_SLA); typesBySuffix.put(SV4CPIO, APPLICATION_X_SV4CPIO); typesBySuffix.put(SV4CRC, APPLICATION_X_SV4CRC); typesBySuffix.put(SVR, APPLICATION_X_WORLD); typesBySuffix.put(SWF, APPLICATION_X_SHOCKWAVE_FLASH); typesBySuffix.put(TAR, APPLICATION_X_TAR); typesBySuffix.put(TBK, APPLICATION_TOOLBOOK); typesBySuffix.put(TCL, APPLICATION_X_TCL); typesBySuffix.put(TCSH, TEXT_X_SCRIPT_TCSH); typesBySuffix.put(TEX, APPLICATION_X_TEX); typesBySuffix.put(TEXI, APPLICATION_X_TEXINFO); typesBySuffix.put(TEXT, APPLICATION_PLAIN); typesBySuffix.put(TGZ, APPLICATION_GNUTAR); typesBySuffix.put(TIF, IMAGE_TIFF); typesBySuffix.put(TSI, AUDIO_TSP_AUDIO); typesBySuffix.put(TSP, APPLICATION_DSPTYPE); typesBySuffix.put(TSV, TEXT_TAB_SEPARATED_VALUES); typesBySuffix.put(UIL, TEXT_X_UIL); typesBySuffix.put(UNI, TEXT_URI_LIST); typesBySuffix.put(UNV, APPLICATION_I_DEAS); typesBySuffix.put(USTAR, APPLICATION_X_USTAR); typesBySuffix.put(VCD, APPLICATION_X_CDLINK); typesBySuffix.put(VCS, TEXT_X_VCALENDAR); typesBySuffix.put(VDA, APPLICATION_VDA); typesBySuffix.put(VDO, VIDEO_VDO); typesBySuffix.put(VEW, APPLICATION_GROUPWISE); typesBySuffix.put(VIV, VIDEO_VIVO); typesBySuffix.put(VMD, APPLICATION_VOCALTEC_MEDIA_DESC); typesBySuffix.put(VMF, APPLICATION_VOCALTEC_MEDIA_FILE); typesBySuffix.put(VOC, AUDIO_VOC); typesBySuffix.put(VOS, VIDEO_VOSAIC); typesBySuffix.put(VOX, AUDIO_VOXWARE); typesBySuffix.put(VQE, AUDIO_X_TWINVQ_PLUGIN); typesBySuffix.put(VQF, AUDIO_X_TWINVQ); typesBySuffix.put(VRML, APPLICATION_X_VRML); typesBySuffix.put(VRT, X_WORLD_X_VRT); typesBySuffix.put(VSD, APPLICATION_X_VISIO); typesBySuffix.put(W60, APPLICATION_WORDPERFECT6_0); typesBySuffix.put(W61, APPLICATION_WORDPERFECT6_1); typesBySuffix.put(WAV, AUDIO_WAV); typesBySuffix.put(WB1, APPLICATION_X_QPRO); typesBySuffix.put(WBMP, IMAGE_VND_WAP_WBMP); typesBySuffix.put(WEB, APPLICATION_VND_XARA); typesBySuffix.put(WK1, APPLICATION_X_123); typesBySuffix.put(WMF, WINDOWS_METAFILE); typesBySuffix.put(WML, TEXT_VND_WAP_WML); 
typesBySuffix.put(WMLC, APPLICATION_VND_WAP_WMLC); typesBySuffix.put(WMLS, TEXT_VND_WAP_WMLSCRIPT); typesBySuffix.put(WMLSC, APPLICATION_VND_WAP_WMLSCRIPTC); typesBySuffix.put(WP, APPLICATION_WORDPERFECT); typesBySuffix.put(WQ1, APPLICATION_X_LOTUS); typesBySuffix.put(WRI, APPLICATION_MSWRITE); typesBySuffix.put(WSC, TEXT_SCRIPLET); typesBySuffix.put(WTK, APPLICATION_X_WINTALK); typesBySuffix.put(XBM, IMAGE_X_XBITMAP); typesBySuffix.put(XDR, VIDEO_X_AMT_DEMORUN); typesBySuffix.put(XGZ, XGL_DRAWING); typesBySuffix.put(XIF, IMAGE_VND_XIFF); typesBySuffix.put(XL, APPLICATION_EXCEL); typesBySuffix.put(XM, AUDIO_XM); typesBySuffix.put(XML, APPLICATION_XML); typesBySuffix.put(XMZ, XGL_MOVIE); typesBySuffix.put(XPIX, APPLICATION_X_VND_LS_XPIX); typesBySuffix.put(XSR, VIDEO_X_AMT_SHOWRUN); typesBySuffix.put(XWD, IMAGE_X_XWD); typesBySuffix.put(Z, APPLICATION_X_COMPRESS); typesBySuffix.put(ZSH, TEXT_X_SCRIPT_ZSH); } private final String text; private MimeType(String text) { this.text = text; } protected MimeType(String text, String... suffixes) { this(text); types.put(text, this); for(String suffix : suffixes) { typesBySuffix.put(suffix, this); } } @Override public final String toString() { return text; } /** * Returns the type indexed by the given suffix. * @param suffix Suffix to find the type. * @return The matching mime type, or null if none is registered. */ public static MimeType fromSuffix(String suffix) { return typesBySuffix.get(suffix); } /** * Returns the type indexed by the given suffix, ignoring case. * @param suffix Suffix to find the type. * @return The matching mime type, or null if none is registered. */ public static MimeType fromSuffixIgnoreCase(String suffix) { MimeType result = null; for(String suffixKey : typesBySuffix.keySet()) { if(suffixKey.equalsIgnoreCase(suffix)) { result = typesBySuffix.get(suffixKey); break; } } return result; } /** * Returns the type indexed by the given text. * @param text Text to find the type. * @return The matching mime type, or null if none is registered. */ public static MimeType fromString(String text) { return types.get(text); } /** * Returns the type indexed by the given text, ignoring case. * @param text Text to find the type. * @return The matching mime type, or null if none is registered. */ public static MimeType fromStringIgnoreCase(String text) { MimeType result = null; for(String textKey : types.keySet()) { if(textKey.equalsIgnoreCase(text)) { result = types.get(textKey); break; } } return result; } }
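A minimal usage sketch of the lookup helpers above (hypothetical; the literal strings "pdf" and "application/pdf" are assumptions inferred from the constant names, whose actual values are declared earlier in the class):

    // Suffix lookups: fromSuffix is case-sensitive, the IgnoreCase variant scans every registered key.
    MimeType byExt = MimeType.fromSuffixIgnoreCase("PDF");       // assumed suffix key: "pdf"
    // Text lookups resolve the raw mime string back to its constant.
    MimeType byText = MimeType.fromString("application/pdf");    // assumed text of APPLICATION_PDF
    // Both lookups return null when nothing matches, so callers should guard:
    if (byExt != null) { System.out.println(byExt); }            // toString() returns the mime text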
Adding specific bson content type
src/main/java/org/hcjf/encoding/MimeType.java
Adding specific bson content type
Java
apache-2.0
4514e9d9fbf29204a4c773e8d928f9fc1ea3cb8e
0
Xceptance/XCMailr,Xceptance/XCMailr,Xceptance/XCMailr,Xceptance/XCMailr
/** * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package models; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Table; import org.joda.time.DateTime; import org.joda.time.Period; import com.avaje.ebean.Ebean; import com.avaje.ebean.Query; import com.avaje.ebean.RawSql; import com.avaje.ebean.RawSqlBuilder; import com.avaje.ebean.SqlUpdate; /** * This Class is used to save all Actions on the Mailserver * * @author Patrick Thum, Xceptance Software Technologies GmbH, Germany */ @Entity @Table(name = "mailtransactions") public class MailTransaction { @Id private Long id; private Long ts; private int status; private String sourceaddr; private String relayaddr; private String targetaddr; /** * the Default-Constructor which initializes all Fields with Default-values */ public MailTransaction() { id = 0L; ts = DateTime.now().getMillis(); status = 0; targetaddr = ""; sourceaddr = ""; } /** * Creates a MailTransaction object with the given parameters<br/> * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern<br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive<br/> * * @param stat * Statuscode of the Transaction * @param source * the Sender's Address * @param relay * Relay-Address of the Mail (the mail which is virtually created on this app) * @param target * Original Recipient's Address of the Mail */ public MailTransaction(int stat, String source, String relay, String target) { ts = DateTime.now().getMillis(); this.status = stat; this.targetaddr = target; this.sourceaddr = source; this.relayaddr = relay; } /** * @return the ID of this Transaction */ public Long getId() { return id; } /** * @param id * the ID of this Transaction to set */ public void setId(Long id) { this.id = id; } /** * @return the Timestamp of this Transaction */ public Long getTs() { return ts; } /** * @return the Timestamp as String in the Format "dd.MM.yyyy HH:mm" */ public String getTsAsString() { return new SimpleDateFormat("dd.MM.yyyy HH:mm").format(new Date(this.ts)); } /** * @param ts * sets the Timestamp in Milliseconds */ public void setTs(Long ts) { this.ts = ts; } /** * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern <br/> * 100 - Mail does not exist<br/> * 200 - Mail
exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive<br/> * * @return a Statuscode */ public int getStatus() { return status; } /** * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern<br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive<br/> * * @param status * the Status to set */ public void setStatus(int status) { this.status = status; } /** * @return the Target-Address of this Transaction */ public String getTargetaddr() { return targetaddr; } /** * @param targetaddr * the Target-Address to set */ public void setTargetaddr(String targetaddr) { this.targetaddr = targetaddr; } /** * @return the Source-Address of this transaction */ public String getSourceaddr() { return sourceaddr; } /** * @param sourceaddr * the Source-Address to set */ public void setSourceaddr(String sourceaddr) { this.sourceaddr = sourceaddr; } /** * @return the Relay-Address of this transaction (if existent) */ public String getRelayaddr() { return relayaddr; } /** * @param relayaddr * the Relay-Address of this transaction (if existent) */ public void setRelayaddr(String relayaddr) { this.relayaddr = relayaddr; } // ------------------------------------------------------- // E-Bean Functions // ------------------------------------------------------- /** * @return all Transactions which were stored in the Database */ public static List<MailTransaction> all() { return Ebean.find(MailTransaction.class).findList(); } /** * @param sortage * a String which indicates the sort order of the returned list, the string should be in the form "fieldname * asc" or "fieldname desc" * @return a sorted list of all MailTransactions */ public static List<MailTransaction> all(String sortage) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().orderBy(sortage).findList(); return list; } /** * Gets all Mail-Transactions in the last "Period" * * @param period * Joda-Time Period * @return a List of Mail-Transactions */ public static List<MailTransaction> getAllInPeriod(Period period) { return Ebean.find(MailTransaction.class).where().gt("ts", DateTime.now().minus(period).getMillis()).findList(); } /** * returns a list of MailTransactions sorted descending and limited by the given number * * @param limit * the maximum number of rows * @return a sorted list of all MailTransactions */ public static List<MailTransaction> getSortedAndLimitedList(int limit) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().orderBy("ts desc").setMaxRows(limit) .findList(); return list; } /** * returns a list of MailTransactions with the given target address * * @param targetAddr * the target address * @return sorted list of MailTransactions with the given target address */ public static List<MailTransaction> getForTarget(final String targetAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("targetaddr", targetAddr) .orderBy("ts desc").findList(); return list; } /** * returns a list of MailTransactions with the given relay address * * @param relayAddr * the relay address * @return sorted list of MailTransactions with the given relay address */ public static List<MailTransaction> getForRelay(final String relayAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("relayaddr", relayAddr) .orderBy("ts desc").findList(); return list; } /** * returns a list of MailTransactions with the given source address * * @param sourceAddr * the source address * @return sorted list of MailTransactions with the given source address */ public static List<MailTransaction> getForSource(final
String sourceAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("sourceaddr", sourceAddr) .orderBy("ts desc").findList(); return list; } /** * Deletes all Transactions that have been stored before the given Timestamp * * @param ts * the Timestamp in milliseconds */ public static void deleteTxInPeriod(Long ts) { String sql = "DELETE FROM MAILTRANSACTIONS"; if (ts != null) { // there's a timestamp, so add it as a WHERE condition sql += " WHERE ts < " + ts; } SqlUpdate down = Ebean.createSqlUpdate(sql); down.execute(); } /** * returns a specific MailTransaction that belongs to the ID * * @param id * the ID of a MailTransaction * @return a MailTransaction */ public static MailTransaction getById(long id) { return Ebean.find(MailTransaction.class, id); } /** * Generates a List of Status-Numbers and the Number of their occurrences * * @return a List of Status-Elements (as an aggregate of Transactions) * @see Status */ public static List<Status> getStatusList() { // create a sql-query that contains each statuscode and its number of occurrences String sql = "SELECT mtx.status, COUNT(mtx.status) AS count FROM mailtransactions mtx GROUP BY mtx.status"; RawSql rawSql = RawSqlBuilder.parse(sql).columnMapping("mtx.status", "statuscode").create(); Query<Status> query = Ebean.find(Status.class); query.setRawSql(rawSql); List<Status> list = query.findList(); return list; } /** * Saves the Transaction in the Database */ public void save() { Ebean.save(this); } /** * saves multiple elements * * @param mtxList * the list of MailTransactions to save */ public static void saveMultipleTx(List<MailTransaction> mtxList) { Ebean.save(mtxList); } }
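A short, hypothetical usage sketch of the Ebean helpers above (it assumes a bootstrapped Ebean server and uses the status codes documented in the constructor's javadoc):

    // Record a successfully forwarded mail (status 300) and query the transaction log.
    MailTransaction mtx = new MailTransaction(300, "sender@example.org", "relay@xcmailr.test", "target@example.org");
    mtx.save();
    List<MailTransaction> latest = MailTransaction.getSortedAndLimitedList(10);
    List<MailTransaction> viaRelay = MailTransaction.getForRelay("relay@xcmailr.test");
    // Prune entries older than one week, passing the same millisecond timestamps the entity stores.
    MailTransaction.deleteTxInPeriod(DateTime.now().minusWeeks(1).getMillis());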
xcmailr-webapp/src/main/java/models/MailTransaction.java
/** * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package models; import java.util.List; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Table; import org.joda.time.DateTime; import org.joda.time.Period; import com.avaje.ebean.Ebean; import com.avaje.ebean.Query; import com.avaje.ebean.RawSql; import com.avaje.ebean.RawSqlBuilder; import com.avaje.ebean.SqlUpdate; /** * This Class is used to save all Actions on the Mailserver * * @author Patrick Thum, Xceptance Software Technologies GmbH, Germany */ @Entity @Table(name = "mailtransactions") public class MailTransaction { @Id private Long id; private Long ts; private int status; private String sourceaddr; private String relayaddr; private String targetaddr; /** * the Default-Constructor which initializes all Fields with Default-values */ public MailTransaction() { id = 0L; ts = DateTime.now().getMillis(); status = 0; targetaddr = ""; sourceaddr = ""; } /** * Creates a MailTransaction object with the given parameters<br/> * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern<br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive<br/> * * @param stat * Statuscode of the Transaction * @param source * the Sender's Address * @param relay * Relay-Address of the Mail (the mail which is virtually created on this app) * @param target * Original Recipient's Address of the Mail */ public MailTransaction(int stat, String source, String relay, String target) { ts = DateTime.now().getMillis(); this.status = stat; this.targetaddr = target; this.sourceaddr = source; this.relayaddr = relay; } /** * @return the ID of this Transaction */ public Long getId() { return id; } /** * @param id * the ID of this Transaction to set */ public void setId(Long id) { this.id = id; } /** * @return the Timestamp of this Transaction */ public Long getTs() { return ts; } /** * @return the Timestamp as String in the Format "dd.MM.yyyy HH:mm" */ public String getTsAsString() { DateTime dt = new DateTime(this.ts); String day = ""; String mon = ""; String hou = ""; String min = ""; // add a leading "0" if the value is under ten if (dt.getDayOfMonth() < 10) { day += "0"; } day += String.valueOf(dt.getDayOfMonth()); if (dt.getMonthOfYear() < 10) { mon += "0"; } mon += String.valueOf(dt.getMonthOfYear()); if (dt.getHourOfDay() < 10) { hou += "0"; } hou += String.valueOf(dt.getHourOfDay()); if (dt.getMinuteOfHour() < 10) { min += "0"; } min += String.valueOf(dt.getMinuteOfHour()); return day + "." + mon + "."
+ dt.getYear() + " " + hou + ":" + min; } /** * @param ts * sets the Timestamp in Milliseconds */ public void setTs(Long ts) { this.ts = ts; } /** * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern <br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive<br/> * * @return a Statuscode */ public int getStatus() { return status; } /** * <b>Statuscodes:</b> <br/> * 0 - Mail has a wrong Pattern<br/> * 100 - Mail does not exist<br/> * 200 - Mail exists but is inactive <br/> * 300 - Mail has been forwarded successfully <br/> * 400 - the Mail can't be forwarded (target not reachable)<br/> * 500 - Relay denied (recipient's address does not belong to this server)<br/> * 600 - User is inactive<br/> * * @param status * the Status to set */ public void setStatus(int status) { this.status = status; } /** * @return the Target-Address of this Transaction */ public String getTargetaddr() { return targetaddr; } /** * @param targetaddr * the Target-Address to set */ public void setTargetaddr(String targetaddr) { this.targetaddr = targetaddr; } /** * @return the Source-Address of this transaction */ public String getSourceaddr() { return sourceaddr; } /** * @param sourceaddr * the Source-Address to set */ public void setSourceaddr(String sourceaddr) { this.sourceaddr = sourceaddr; } /** * @return the Relay-Address of this transaction (if existent) */ public String getRelayaddr() { return relayaddr; } /** * @param relayaddr * the Relay-Address of this transaction (if existent) */ public void setRelayaddr(String relayaddr) { this.relayaddr = relayaddr; } // ------------------------------------------------------- // E-Bean Functions // ------------------------------------------------------- /** * @return all Transactions which were stored in the Database */ public static List<MailTransaction> all() { return Ebean.find(MailTransaction.class).findList(); } /** * @param sortage * a String which indicates the sort order of the returned list, the string should be in the form "fieldname * asc" or "fieldname desc" * @return a sorted list of all MailTransactions */ public static List<MailTransaction> all(String sortage) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().orderBy(sortage).findList(); return list; } /** * Gets all Mail-Transactions in the last "Period" * * @param period * Joda-Time Period * @return a List of Mail-Transactions */ public static List<MailTransaction> getAllInPeriod(Period period) { return Ebean.find(MailTransaction.class).where().gt("ts", DateTime.now().minus(period).getMillis()).findList(); } /** * returns a list of MailTransactions sorted descending and limited by the given number * * @param limit * the maximum number of rows * @return a sorted list of all MailTransactions */ public static List<MailTransaction> getSortedAndLimitedList(int limit) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().orderBy("ts desc").setMaxRows(limit) .findList(); return list; } /** * returns a list of MailTransactions with the given target address * * @param targetAddr * the target address * @return sorted list of MailTransactions with the given target address */ public static List<MailTransaction> getForTarget(final String targetAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("targetaddr",
targetAddr) .orderBy("ts desc").findList(); return list; } /** * returns a list of MailTransactions with the given relay address * * @param relayAddr * the relay address * @return sorted list of MailTransactions with the given relay address */ public static List<MailTransaction> getForRelay(final String relayAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("relayaddr", relayAddr) .orderBy("ts desc").findList(); return list; } /** * returns a list of MailTransactions with the given source address * * @param sourceAddr * the source address * @return sorted list of MailTransactions with the given source address */ public static List<MailTransaction> getForSource(final String sourceAddr) { List<MailTransaction> list = Ebean.find(MailTransaction.class).where().eq("sourceaddr", sourceAddr) .orderBy("ts desc").findList(); return list; } /** * Deletes all Transactions that have been stored before the given Timestamp * * @param ts * the Timestamp in milliseconds */ public static void deleteTxInPeriod(Long ts) { String sql = "DELETE FROM MAILTRANSACTIONS"; if (ts != null) { // there's a timestamp, so add it as a WHERE condition sql += " WHERE ts < " + ts; } SqlUpdate down = Ebean.createSqlUpdate(sql); down.execute(); } /** * returns a specific MailTransaction that belongs to the ID * * @param id * the ID of a MailTransaction * @return a MailTransaction */ public static MailTransaction getById(long id) { return Ebean.find(MailTransaction.class, id); } /** * Generates a List of Status-Numbers and the Number of their occurrences * * @return a List of Status-Elements (as an aggregate of Transactions) * @see Status */ public static List<Status> getStatusList() { // create a sql-query that contains each statuscode and its number of occurrences String sql = "SELECT mtx.status, COUNT(mtx.status) AS count FROM mailtransactions mtx GROUP BY mtx.status"; RawSql rawSql = RawSqlBuilder.parse(sql).columnMapping("mtx.status", "statuscode").create(); Query<Status> query = Ebean.find(Status.class); query.setRawSql(rawSql); List<Status> list = query.findList(); return list; } /** * Saves the Transaction in the Database */ public void save() { Ebean.save(this); } /** * saves multiple elements * * @param mtxList * the list of MailTransactions to save */ public static void saveMultipleTx(List<MailTransaction> mtxList) { Ebean.save(mtxList); } }
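A side note on this row's change (a sketch, assuming the JVM default time zone matches Joda's): the hand-rolled zero padding in the old getTsAsString above produces the same "dd.MM.yyyy HH:mm" output as the single SimpleDateFormat call that replaces it in the new version of the file:

    // Equivalent to padding day, month, hour and minute to two digits by hand:
    String formatted = new SimpleDateFormat("dd.MM.yyyy HH:mm").format(new Date(ts));
    // Note SimpleDateFormat is not thread-safe, hence a fresh instance per call.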
refactored date function
xcmailr-webapp/src/main/java/models/MailTransaction.java
refactored date function
Java
apache-2.0
c1abf353a135b68198d85484885674500fd1a339
0
rajashekharmunthakewill/drools,ngs-mtech/drools,TonnyFeng/drools,ngs-mtech/drools,lanceleverich/drools,ThiagoGarciaAlves/drools,vinodkiran/drools,ThomasLau/drools,292388900/drools,ngs-mtech/drools,prabasn/drools,rajashekharmunthakewill/drools,reynoldsm88/drools,manstis/drools,jiripetrlik/drools,rajashekharmunthakewill/drools,romartin/drools,reynoldsm88/drools,ThomasLau/drools,prabasn/drools,OnePaaS/drools,winklerm/drools,prabasn/drools,yurloc/drools,reynoldsm88/drools,292388900/drools,psiroky/drools,292388900/drools,vinodkiran/drools,mrietveld/drools,mrrodriguez/drools,lanceleverich/drools,HHzzhz/drools,mswiderski/drools,iambic69/drools,ThiagoGarciaAlves/drools,amckee23/drools,292388900/drools,winklerm/drools,pperboires/PocDrools,jomarko/drools,sutaakar/drools,mrietveld/drools,kevinpeterson/drools,kedzie/drools-android,amckee23/drools,OnePaaS/drools,yurloc/drools,droolsjbpm/drools,lanceleverich/drools,jomarko/drools,romartin/drools,manstis/drools,kevinpeterson/drools,liupugong/drools,HHzzhz/drools,ThomasLau/drools,sotty/drools,jomarko/drools,droolsjbpm/drools,ChallenHB/drools,mrietveld/drools,kedzie/drools-android,prabasn/drools,psiroky/drools,HHzzhz/drools,iambic69/drools,psiroky/drools,sutaakar/drools,sutaakar/drools,mswiderski/drools,liupugong/drools,droolsjbpm/drools,TonnyFeng/drools,sotty/drools,amckee23/drools,jiripetrlik/drools,iambic69/drools,mswiderski/drools,vinodkiran/drools,manstis/drools,kevinpeterson/drools,kedzie/drools-android,kevinpeterson/drools,mrrodriguez/drools,ThomasLau/drools,pperboires/PocDrools,amckee23/drools,ThomasLau/drools,romartin/drools,jiripetrlik/drools,OnePaaS/drools,Buble1981/MyDroolsFork,reynoldsm88/drools,ThiagoGarciaAlves/drools,kedzie/drools-android,mrrodriguez/drools,winklerm/drools,romartin/drools,sotty/drools,prabasn/drools,winklerm/drools,TonnyFeng/drools,jomarko/drools,reynoldsm88/drools,liupugong/drools,lanceleverich/drools,ChallenHB/drools,292388900/drools,winklerm/drools,sutaakar/drools,ngs-mtech/drools,jiripetrlik/drools,HHzzhz/drools,pwachira/droolsexamples,jomarko/drools,mrrodriguez/drools,sutaakar/drools,liupugong/drools,manstis/drools,rajashekharmunthakewill/drools,yurloc/drools,mrrodriguez/drools,jiripetrlik/drools,OnePaaS/drools,OnePaaS/drools,TonnyFeng/drools,manstis/drools,mrietveld/drools,TonnyFeng/drools,kedzie/drools-android,liupugong/drools,mrietveld/drools,iambic69/drools,romartin/drools,psiroky/drools,ThiagoGarciaAlves/drools,Buble1981/MyDroolsFork,Buble1981/MyDroolsFork,mswiderski/drools,rajashekharmunthakewill/drools,lanceleverich/drools,ChallenHB/drools,HHzzhz/drools,sotty/drools,droolsjbpm/drools,ngs-mtech/drools,amckee23/drools,droolsjbpm/drools,Buble1981/MyDroolsFork,vinodkiran/drools,vinodkiran/drools,ChallenHB/drools,yurloc/drools,kevinpeterson/drools,ChallenHB/drools,sotty/drools,pperboires/PocDrools,ThiagoGarciaAlves/drools,iambic69/drools,pperboires/PocDrools
package org.drools.rule.builder.dialect.mvel; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import org.drools.base.mvel.DroolsMVELFactory; import org.drools.base.mvel.MVELConsequence; import org.drools.compiler.Dialect; import org.drools.compiler.RuleError; import org.drools.rule.builder.ConsequenceBuilder; import org.drools.rule.builder.RuleBuildContext; import org.mvel.Macro; import org.mvel.MacroProcessor; public class MVELConsequenceBuilder implements ConsequenceBuilder { //private final Interceptor assertInterceptor; //private final Interceptor modifyInterceptor; private final Map macros; public MVELConsequenceBuilder() { macros = new HashMap( 4 ); macros.put( "insert", new Macro() { public String doMacro() { return "drools.insert"; } } ); macros.put( "insertLogical", new Macro() { public String doMacro() { return "drools.insertLogical"; } } ); macros.put( "modify", new Macro() { public String doMacro() { return "@Modify with"; } } ); macros.put( "update", new Macro() { public String doMacro() { return "drools.update"; } } ); macros.put( "retract", new Macro() { public String doMacro() { return "drools.retract"; } } ); } public void build(final RuleBuildContext context) { // pushing consequence LHS into the stack for variable resolution context.getBuildStack().push( context.getRule().getLhs() ); try { MVELDialect dialect = (MVELDialect) context.getDialect(); String text = processMacros( (String) context.getRuleDescr().getConsequence() ); Dialect.AnalysisResult analysis = dialect.analyzeBlock( context, context.getRuleDescr(), dialect.getInterceptors(), text, null ); final Serializable expr = dialect.compile( text, analysis, dialect.getInterceptors(), null, context ); final DroolsMVELFactory factory = new DroolsMVELFactory( context.getDeclarationResolver().getDeclarations(), null, context.getPkg().getGlobals(), analysis.getBoundIdentifiers() ); context.getRule().setConsequence( new MVELConsequence( expr, factory ) ); } catch ( final Exception e ) { context.getErrors().add( new RuleError( context.getRule(), context.getRuleDescr(), null, "Unable to build expression for 'consequence' '" + context.getRuleDescr().getConsequence() + "'" ) ); } } public String processMacros(String consequence) { MacroProcessor macroProcessor = new MacroProcessor(); macroProcessor.setMacros( macros ); return macroProcessor.parse( delimitExpressions( consequence ) ); } /** * Allows newlines to demarcate expressions, as per MVEL command line. * If an expression spans multiple lines (i.e. inside an unbalanced bracket) then * it is left alone. * Uses character-based iteration which is at least an order of magnitude faster than a single * simple regex. */ public static String delimitExpressions(String s) { StringBuffer result = new StringBuffer(); char[] cs = s.toCharArray(); int brace = 0; int sqre = 0; int crly = 0; char lastNonWhite = ';'; for ( int i = 0; i < cs.length; i++ ) { char c = cs[i]; switch ( c ) { case '(' : brace++; break; case '{' : crly++; break; case '[' : sqre++; break; case ')' : brace--; break; case '}' : crly--; break; case ']' : sqre--; break; default : break; } if ( (brace == 0 && sqre == 0 && crly == 0) && (c == '\n' || c == '\r') ) { if ( lastNonWhite != ';' ) { result.append( ';' ); lastNonWhite = ';'; } } else if ( !Character.isWhitespace( c ) ) { lastNonWhite = c; } result.append( c ); } return result.toString(); } }
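A hedged illustration of the two preprocessing steps above (the input rule text is hypothetical; the expected output follows directly from the bracket counting in delimitExpressions and from the macro table built in the constructor):

    // Top-level newlines become ';' delimiters; newlines inside the open '{' block are left alone.
    String raw = "insert( foo )\nmodify( bar ) {\n  baz\n}";
    String delimited = MVELConsequenceBuilder.delimitExpressions(raw);
    // delimited == "insert( foo );\nmodify( bar ) {\n  baz\n}"
    // processMacros then rewrites the tokens, e.g. "insert" -> "drools.insert" and,
    // with this commit, "insertLogical" -> "drools.insertLogical", before compilation.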
drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELConsequenceBuilder.java
package org.drools.rule.builder.dialect.mvel; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import org.drools.base.mvel.DroolsMVELFactory; import org.drools.base.mvel.MVELConsequence; import org.drools.compiler.Dialect; import org.drools.compiler.RuleError; import org.drools.rule.builder.ConsequenceBuilder; import org.drools.rule.builder.RuleBuildContext; import org.mvel.Macro; import org.mvel.MacroProcessor; public class MVELConsequenceBuilder implements ConsequenceBuilder { //private final Interceptor assertInterceptor; //private final Interceptor modifyInterceptor; private final Map macros; public MVELConsequenceBuilder() { macros = new HashMap( 4 ); macros.put( "insert", new Macro() { public String doMacro() { return "drools.insert"; } } ); macros.put( "modify", new Macro() { public String doMacro() { return "@Modify with"; } } ); macros.put( "update", new Macro() { public String doMacro() { return "drools.update"; } } ); macros.put( "retract", new Macro() { public String doMacro() { return "drools.retract"; } } ); } public void build(final RuleBuildContext context) { // pushing consequence LHS into the stack for variable resolution context.getBuildStack().push( context.getRule().getLhs() ); try { MVELDialect dialect = (MVELDialect) context.getDialect(); String text = processMacros( (String) context.getRuleDescr().getConsequence() ); Dialect.AnalysisResult analysis = dialect.analyzeBlock( context, context.getRuleDescr(), dialect.getInterceptors(), text, null ); final Serializable expr = dialect.compile( text, analysis, dialect.getInterceptors(), null, context ); final DroolsMVELFactory factory = new DroolsMVELFactory( context.getDeclarationResolver().getDeclarations(), null, context.getPkg().getGlobals(), analysis.getBoundIdentifiers() ); context.getRule().setConsequence( new MVELConsequence( expr, factory ) ); } catch ( final Exception e ) { context.getErrors().add( new RuleError( context.getRule(), context.getRuleDescr(), null, "Unable to build expression for 'consequence' '" + context.getRuleDescr().getConsequence() + "'" ) ); } } public String processMacros(String consequence) { MacroProcessor macroProcessor = new MacroProcessor(); macroProcessor.setMacros( macros ); return macroProcessor.parse( delimitExpressions( consequence ) ); } /** * Allows newlines to demarcate expressions, as per MVEL command line. * If an expression spans multiple lines (i.e. inside an unbalanced bracket) then * it is left alone. * Uses character-based iteration which is at least an order of magnitude faster than a single * simple regex. */ public static String delimitExpressions(String s) { StringBuffer result = new StringBuffer(); char[] cs = s.toCharArray(); int brace = 0; int sqre = 0; int crly = 0; char lastNonWhite = ';'; for ( int i = 0; i < cs.length; i++ ) { char c = cs[i]; switch ( c ) { case '(' : brace++; break; case '{' : crly++; break; case '[' : sqre++; break; case ')' : brace--; break; case '}' : crly--; break; case ']' : sqre--; break; default : break; } if ( (brace == 0 && sqre == 0 && crly == 0) && (c == '\n' || c == '\r') ) { if ( lastNonWhite != ';' ) { result.append( ';' ); lastNonWhite = ';'; } } else if ( !Character.isWhitespace( c ) ) { lastNonWhite = c; } result.append( c ); } return result.toString(); } }
-added support for insertLogical with the mvel dialect macro processor git-svn-id: a243bed356d289ca0d1b6d299a0597bdc4ecaa09@15240 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
drools-compiler/src/main/java/org/drools/rule/builder/dialect/mvel/MVELConsequenceBuilder.java
-added support for insertLogical with the mvel dialect macro processor
Java
apache-2.0
2978b79186457c600db0dbab7a07eb25da41c40c
0
webbukkit/DynmapCore,mikeprimm/DynmapCore,webbukkit/DynmapCore,KovuTheHusky/DynmapCore,webbukkit/DynmapCore,webbukkit/DynmapCore,mikeprimm/DynmapCore,instantdelay/DynmapCore,KovuTheHusky/DynmapCore,KovuTheHusky/DynmapCore,instantdelay/DynmapCore,mikeprimm/DynmapCore,mikeprimm/DynmapCore,instantdelay/DynmapCore,KovuTheHusky/DynmapCore,instantdelay/DynmapCore
package org.dynmap.hdmap; import java.awt.image.BufferedImage; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.imageio.ImageIO; import org.dynmap.Color; import org.dynmap.ConfigurationNode; import org.dynmap.DynmapCore; import org.dynmap.Log; import org.dynmap.MapManager; import org.dynmap.common.BiomeMap; import org.dynmap.renderer.CustomColorMultiplier; import org.dynmap.utils.BlockStep; import org.dynmap.utils.DynIntHashMap; import org.dynmap.utils.ForgeConfigFile; import org.dynmap.utils.MapIterator; /** * Loader and processor class for Minecraft texture packs * Texture packs are found in the dynmap/texturepacks directory, and are either ZIP files * or directories whose content matches the structure of a zipped texture pack: * ./terrain.png - main color data (required) * misc/grasscolor.png - tone for grass color, biome sensitive (required) * misc/foliagecolor.png - tone for leaf color, biome sensitive (required) * custom_lava_still.png - custom still lava animation (optional) * custom_lava_flowing.png - custom flowing lava animation (optional) * custom_water_still.png - custom still water animation (optional) * custom_water_flowing.png - custom flowing water animation (optional) * misc/watercolorX.png - custom water color multiplier (optional) * misc/swampgrasscolor.png - tone for grass color in swamps (optional) * misc/swampfoliagecolor.png - tone for leaf color in swamps (optional) */ public class TexturePack { /* Loaded texture packs */ private static HashMap<String, TexturePack> packs = new HashMap<String, TexturePack>(); private static Object packlock = new Object(); private static final String TERRAIN_PNG = "terrain.png"; private static final String GRASSCOLOR_PNG = "misc/grasscolor.png"; private static final String FOLIAGECOLOR_PNG = "misc/foliagecolor.png"; private static final String WATERCOLORX_PNG = "misc/watercolorX.png"; private static final String CUSTOMLAVASTILL_PNG = "custom_lava_still.png"; private static final String CUSTOMLAVAFLOWING_PNG = "custom_lava_flowing.png"; private static final String CUSTOMWATERSTILL_PNG = "custom_water_still.png"; private static final String CUSTOMWATERFLOWING_PNG = "custom_water_flowing.png"; private static final String SWAMPGRASSCOLOR_PNG = "misc/swampgrasscolor.png"; private static final String SWAMPFOLIAGECOLOR_PNG = "misc/swampfoliagecolor.png"; private static final String STANDARDTP = "standard"; /* Color modifier codes (x1000 for value in definition file, x1000000 for internal value) */ //private static final int COLORMOD_NONE = 0; private static final int COLORMOD_GRASSTONED = 1; private static final int COLORMOD_FOLIAGETONED = 2; private static final int COLORMOD_WATERTONED = 3; private static final int COLORMOD_ROT90 = 4; private static final int COLORMOD_ROT180 = 5; private static final int COLORMOD_ROT270 = 6; private static final int COLORMOD_FLIPHORIZ = 7; private static final int COLORMOD_SHIFTDOWNHALF = 8; private static final int COLORMOD_SHIFTDOWNHALFANDFLIPHORIZ = 9; private static final int COLORMOD_INCLINEDTORCH = 10; private static final int COLORMOD_GRASSSIDE = 11;
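/* (Added illustration, not in the original source.) A worked example of
 * the x1000 encoding described above: in a texture definition file a
 * face value of 1038 means COLORMOD_GRASSTONED (1) * COLORMOD_MULT_FILE
 * applied to tile 38 (TILEINDEX_GRASSMASK); parseTextureIndex() further
 * below re-bases it to 38 + 1 * COLORMOD_MULT_INTERNAL for internal use,
 * using the COLORMOD_MULT_ factors declared just below. */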
private static final int COLORMOD_CLEARINSIDE = 12; private static final int COLORMOD_PINETONED = 13; private static final int COLORMOD_BIRCHTONED = 14; private static final int COLORMOD_LILYTONED = 15; //private static final int COLORMOD_OLD_WATERSHADED = 16; private static final int COLORMOD_MULTTONED = 17; /* Toned with colorMult or custColorMult - not biome-style */ private static final int COLORMOD_GRASSTONED270 = 18; // GRASSTONED + ROT270 private static final int COLORMOD_FOLIAGETONED270 = 19; // FOLIAGETONED + ROT270 private static final int COLORMOD_WATERTONED270 = 20; // WATERTONED + ROT270 private static final int COLORMOD_MULTTONED_CLEARINSIDE = 21; // MULTTONED + CLEARINSIDE private static final int COLORMOD_FOLIAGEMULTTONED = 22; // FOLIAGETONED + colorMult or custColorMult private static final int COLORMOD_MULT_FILE = 1000; private static final int COLORMOD_MULT_INTERNAL = 1000000; /* Special tile index values */ private static final int TILEINDEX_BLANK = -1; private static final int TILEINDEX_GRASS = 0; private static final int TILEINDEX_GRASSMASK = 38; private static final int TILEINDEX_SNOW = 66; private static final int TILEINDEX_SNOWSIDE = 68; private static final int TILEINDEX_PISTONSIDE = 108; private static final int TILEINDEX_GLASSPANETOP = 148; private static final int TILEINDEX_AIRFRAME = 158; private static final int TILEINDEX_REDSTONE_NSEW_TONE = 164; private static final int TILEINDEX_REDSTONE_EW_TONE = 165; private static final int TILEINDEX_EYEOFENDER = 174; private static final int TILEINDEX_REDSTONE_NSEW = 180; private static final int TILEINDEX_REDSTONE_EW = 181; private static final int TILEINDEX_STATIONARYWATER = 257; private static final int TILEINDEX_MOVINGWATER = 258; private static final int TILEINDEX_STATIONARYLAVA = 259; private static final int TILEINDEX_MOVINGLAVA = 260; private static final int TILEINDEX_PISTONEXTSIDE = 261; private static final int TILEINDEX_PISTONSIDE_EXT = 262; private static final int TILEINDEX_PANETOP_X = 263; private static final int TILEINDEX_AIRFRAME_EYE = 264; private static final int TILEINDEX_FIRE = 265; private static final int TILEINDEX_PORTAL = 266; private static final int MAX_TILEINDEX = 266; /* Index of last static tile definition */ private static final int TILETABLE_LEN = 5000; /* Leave room for dynamic tiles */ /* Indexes of faces in a CHEST format tile file */ private static final int TILEINDEX_CHEST_TOP = 0; private static final int TILEINDEX_CHEST_LEFT = 1; private static final int TILEINDEX_CHEST_RIGHT = 2; private static final int TILEINDEX_CHEST_FRONT = 3; private static final int TILEINDEX_CHEST_BACK = 4; private static final int TILEINDEX_CHEST_BOTTOM = 5; private static final int TILEINDEX_CHEST_COUNT = 6; /* Indexes of faces in a BIGCHEST format tile file */ private static final int TILEINDEX_BIGCHEST_TOPLEFT = 0; private static final int TILEINDEX_BIGCHEST_TOPRIGHT = 1; private static final int TILEINDEX_BIGCHEST_FRONTLEFT = 2; private static final int TILEINDEX_BIGCHEST_FRONTRIGHT = 3; private static final int TILEINDEX_BIGCHEST_LEFT = 4; private static final int TILEINDEX_BIGCHEST_RIGHT = 5; private static final int TILEINDEX_BIGCHEST_BACKLEFT = 6; private static final int TILEINDEX_BIGCHEST_BACKRIGHT = 7; private static final int TILEINDEX_BIGCHEST_BOTTOMLEFT = 8; private static final int TILEINDEX_BIGCHEST_BOTTOMRIGHT = 9; private static final int TILEINDEX_BIGCHEST_COUNT = 10; /* Indexes of faces in the SIGN format tile file */ private static final int TILEINDEX_SIGN_FRONT = 0; private static
final int TILEINDEX_SIGN_BACK = 1; private static final int TILEINDEX_SIGN_TOP = 2; private static final int TILEINDEX_SIGN_BOTTOM = 3; private static final int TILEINDEX_SIGN_LEFTSIDE = 4; private static final int TILEINDEX_SIGN_RIGHTSIDE = 5; private static final int TILEINDEX_SIGN_POSTFRONT = 6; private static final int TILEINDEX_SIGN_POSTBACK = 7; private static final int TILEINDEX_SIGN_POSTLEFT = 8; private static final int TILEINDEX_SIGN_POSTRIGHT = 9; private static final int TILEINDEX_SIGN_COUNT = 10; /* Indexes of faces in the SKIN format tile file */ private static final int TILEINDEX_SKIN_FACEFRONT = 0; private static final int TILEINDEX_SKIN_FACELEFT = 1; private static final int TILEINDEX_SKIN_FACERIGHT = 2; private static final int TILEINDEX_SKIN_FACEBACK = 3; private static final int TILEINDEX_SKIN_FACETOP = 4; private static final int TILEINDEX_SKIN_FACEBOTTOM = 5; private static final int TILEINDEX_SKIN_COUNT = 6; private static final int BLOCKTABLELEN = 256; /* Enough for normal block IDs */ public static enum TileFileFormat { GRID, CHEST, BIGCHEST, SIGN, SKIN, CUSTOM, TILESET, BIOME }; /* Map of 1.5 texture files to 0-255 texture indices */ private static final String[] terrain_map = { "grass_top", "stone", "dirt", "grass_side", "wood", "stoneslab_side", "stoneslab_top", "brick", "tnt_side", "tnt_top", "tnt_bottom", "web", "rose", "flower", "portal", "sapling", "stonebrick", "bedrock", "sand", "gravel", "tree_side", "tree_top", "blockIron", "blockGold", "blockDiamond", "blockEmerald", null, null, "mushroom_red", "mushroom_brown", "sapling_jungle", null, "oreGold", "oreIron", "oreCoal", "bookshelf", "stoneMoss", "obsidian", "grass_side_overlay", "tallgrass", null, "beacon", null, "workbench_top", "furnace_front", "furnace_side", "dispenser_front", null, "sponge", "glass", "oreDiamond", "oreRedstone", "leaves", "leaves_opaque", "stonebricksmooth", "deadbush", "fern", null, null, "workbench_side", "workbench_front", "furnace_front_lit", "furnace_top", "sapling_spruce", "cloth_0", "mobSpawner", "snow", "ice", "snow_side", "cactus_top", "cactus_side", "cactus_bottom", "clay", "reeds", "musicBlock", "jukebox_top", "waterlily", "mycel_side", "mycel_top", "sapling_birch", "torch", "doorWood_upper", "doorIron_upper", "ladder", "trapdoor", "fenceIron", "farmland_wet", "farmland_dry", "crops_0", "crops_1", "crops_2", "crops_3", "crops_4", "crops_5", "crops_6", "crops_7", "lever", "doorWood_lower", "doorIron_lower", "redtorch_lit", "stonebricksmooth_mossy", "stonebricksmooth_cracked", "pumpkin_top", "hellrock", "hellsand", "lightgem", "piston_top_sticky", "piston_top", "piston_side", "piston_bottom", "piston_inner_top", "stem_straight", "rail_turn", "cloth_15", "cloth_7", "redtorch", "tree_spruce", "tree_birch", "pumpkin_side", "pumpkin_face", "pumpkin_jack", "cake_top", "cake_side", "cake_inner", "cake_bottom", "mushroom_skin_red", "mushroom_skin_brown", "stem_bent", "rail", "cloth_14", "cloth_6", "repeater", "leaves_spruce", "leaves_spruce_opaque", "bed_feet_top", "bed_head_top", "melon_side", "melon_top", "cauldron_top", "cauldron_inner", null, "mushroom_skin_stem", "mushroom_inside", "vine", "blockLapis", "cloth_13", "cloth_5", "repeater_lit", "thinglass_top", "bed_feet_end", "bed_feet_side", "bed_head_side", "bed_head_end", "tree_jungle", "cauldron_side", "cauldron_bottom", "brewingStand_base", "brewingStand", "endframe_top", "endframe_side", "oreLapis", "cloth_12", "cloth_4", "goldenRail", "redstoneDust_cross", "redstoneDust_line", "enchantment_top", "dragonEgg", "cocoa_2", 
"cocoa_1", "cocoa_0", "oreEmerald", "tripWireSource", "tripWire", "endframe_eye", "whiteStone", "sandstone_top", "cloth_11", "cloth_3", "goldenRail_powered", "redstoneDust_cross_overlay", "redstoneDust_line_overlay", "enchantment_side", "enchantment_bottom", "commandBlock", "itemframe_back", "flowerPot", null, null, null, null, null, "sandstone_side", "cloth_10", "cloth_2", "detectorRail", "leaves_jungle", "leaves_jungle_opaque", "wood_spruce", "wood_jungle", "carrots_0", "carrots_1", "carrots_2", "carrots_3", "potatoes_3", null, null, null, "sandstone_bottom", "cloth_9", "cloth_1", "redstoneLight", "redstoneLight_lit", "stonebricksmooth_carved", "wood_birch", "anvil_base", "anvil_top_damaged_1", null, null, null, null, null, null, null, "netherBrick", "cloth_8", "netherStalk_0", "netherStalk_1", "netherStalk_2", "sandstone_carved", "sandstone_smooth", "anvil_top", "anvil_top_damaged_2", null, null, null, null, null, null, null, "destroy_0", "destroy_1", "destroy_2", "destroy_3", "destroy_4", "destroy_5", "destroy_6", "destroy_7", "destroy_8", "destroy_9", null, null, null, null, null, null, /* Extra 1.5-based textures: starting at 256 (corresponds to TILEINDEX_ values) */ null, "water", "water_flow", "lava", "lava_flow", null, null, null, null, "fire_0", "portal" }; private static class CustomTileRec { int srcx, srcy, width, height, targetx, targety; } private static int next_dynamic_tile = MAX_TILEINDEX+1; private static class DynamicTileFile { int idx; /* Index of tile in addonfiles */ String filename; int tilecnt_x, tilecnt_y; /* Number of tiles horizontally and vertically */ int tile_to_dyntile[]; /* Mapping from tile index in tile file to dynamic ID in global tile table (terrain_argb): 0=unassigned */ TileFileFormat format; List<CustomTileRec> cust; String[] tilenames; /* For TILESET, array of tilenames, indexed by tile index */ String setdir; /* For TILESET, directory of tile set in texture */ } private static ArrayList<DynamicTileFile> addonfiles = new ArrayList<DynamicTileFile>(); private static Map<String, DynamicTileFile> addonfilesbyname = new HashMap<String, DynamicTileFile>(); private static String getBlockFileName(int idx) { if ((idx >= 0) && (idx < terrain_map.length) && (terrain_map[idx] != null)) { return "textures/blocks/" + terrain_map[idx] + ".png"; } return null; } /* Reset add-on tile data */ private static void resetFiles() { synchronized(packlock) { packs.clear(); } addonfiles.clear(); addonfilesbyname.clear(); next_dynamic_tile = MAX_TILEINDEX+1; /* Now, load entries for vanilla v1.5 files */ for(int i = 0; i < terrain_map.length; i++) { String fn = getBlockFileName(i); if (fn != null) { int idx = findOrAddDynamicTileFile(fn, 1, 1, TileFileFormat.GRID, new String[0]); DynamicTileFile dtf = addonfiles.get(idx); if (dtf != null) { // Fix mapping of tile ID to global table index dtf.tile_to_dyntile[0] = i; } } } } private static class LoadedImage { int[] argb; int width, height; int trivial_color; } private int[][] terrain_argb; private int native_scale; private CTMTexturePack ctm; private BitSet hasBlockColoring = new BitSet(); // Quick lookup - (blockID << 4) + blockMeta - set if custom colorizer private DynIntHashMap blockColoring = new DynIntHashMap(); // Map - index by (blockID << 4) + blockMeta - Index of image for color map private int water_toned_op = COLORMOD_WATERTONED; private static final int IMG_GRASSCOLOR = 0; private static final int IMG_FOLIAGECOLOR = 1; private static final int IMG_CUSTOMWATERMOVING = 2; private static final int IMG_CUSTOMWATERSTILL 
= 3; private static final int IMG_CUSTOMLAVAMOVING = 4; private static final int IMG_CUSTOMLAVASTILL = 5; private static final int IMG_WATERCOLORX = 6; private static final int IMG_SWAMPGRASSCOLOR = 7; private static final int IMG_SWAMPFOLIAGECOLOR = 8; private static final int IMG_CNT = 9; /* 0-(IMG_CNT-1) are fixed, IMG_CNT+x is dynamic file x */ private LoadedImage[] imgs; private HashMap<Integer, TexturePack> scaled_textures; private Object scaledlock = new Object(); public enum BlockTransparency { OPAQUE, /* Block is opaque - blocks light - lit by light from adjacent blocks */ TRANSPARENT, /* Block is transparent - passes light - lit by light level in own block */ SEMITRANSPARENT, /* Opaque block that doesn't block all rays (steps, slabs) - use light above for face lighting on opaque blocks */ LEAVES /* Special case of transparent, to work around lighting errors in SpoutPlugin */ } public static class HDTextureMap { private int faces[]; /* index in terrain.png of image for each face (indexed by BlockStep.ordinal() OR patch index) */ private byte[] layers; /* If layered, each index corresponds to faces index, and value is index of next layer */ private List<Integer> blockids; private int databits; private BlockTransparency bt; private boolean userender; private String blockset; private int colorMult; private CustomColorMultiplier custColorMult; private boolean stdrotate; // Marked for corrected to proper : stdrot=true private static HDTextureMap[] texmaps; private static BlockTransparency transp[]; private static boolean userenderdata[]; private static HDTextureMap blank; private static void resizeTable(int idx) { int cnt = idx+1; /* Copy texture maps */ HDTextureMap[] newtexmaps = new HDTextureMap[cnt*16]; System.arraycopy(texmaps, 0, newtexmaps, 0, texmaps.length); Arrays.fill(newtexmaps, texmaps.length, newtexmaps.length, blank); texmaps = newtexmaps; /* Copy transparency */ BlockTransparency[] newtrans = new BlockTransparency[cnt]; System.arraycopy(transp, 0, newtrans, 0, transp.length); Arrays.fill(newtrans, transp.length, cnt, BlockTransparency.OPAQUE); transp = newtrans; /* Copy use-render-data */ boolean[] newurd = new boolean[cnt]; System.arraycopy(userenderdata, 0, newurd, 0, userenderdata.length); Arrays.fill(newurd, userenderdata.length, cnt, false); userenderdata = newurd; } private static void initializeTable() { texmaps = new HDTextureMap[16*BLOCKTABLELEN]; transp = new BlockTransparency[BLOCKTABLELEN]; userenderdata = new boolean[BLOCKTABLELEN]; blank = new HDTextureMap(); for(int i = 0; i < texmaps.length; i++) texmaps[i] = blank; for(int i = 0; i < transp.length; i++) transp[i] = BlockTransparency.OPAQUE; } private HDTextureMap() { blockids = Collections.singletonList(Integer.valueOf(0)); databits = 0xFFFF; userender = false; blockset = null; colorMult = 0; custColorMult = null; faces = new int[] { TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK }; layers = null; stdrotate = true; } public HDTextureMap(List<Integer> blockids, int databits, int[] faces, byte[] layers, BlockTransparency trans, boolean userender, int colorMult, CustomColorMultiplier custColorMult, String blockset, boolean stdrot) { this.faces = faces; this.layers = layers; this.blockids = blockids; this.databits = databits; this.bt = trans; this.colorMult = colorMult; this.custColorMult = custColorMult; this.userender = userender; this.blockset = blockset; this.stdrotate = stdrot; } public void addToTable() { /* Add entries to lookup table */ 
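// (Added note, not in the original source.) The lookup table is flat:
// slot = 16*blockid + metadata. For example, block id 35 with data 14
// lands in texmaps[16*35 + 14] = texmaps[574]; getMap() below reads the
// same slot as (blkid << 4) + blkdata.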
for(Integer blkid : blockids) { if(blkid >= transp.length) resizeTable(blkid); if(blkid > 0) { for(int i = 0; i < 16; i++) { if((databits & (1 << i)) != 0) { int idx = 16*blkid + i; if((this.blockset != null) && (this.blockset.equals("core") == false)) { HDBlockModels.resetIfNotBlockSet(blkid, i, this.blockset); } texmaps[idx] = this; } } transp[blkid] = bt; /* Transparency is only blocktype based right now */ userenderdata[blkid] = userender; /* Ditto for using render data */ } } } public static HDTextureMap getMap(int blkid, int blkdata, int blkrenderdata) { try { if(userenderdata[blkid]) return texmaps[(blkid<<4) + blkrenderdata]; else return texmaps[(blkid<<4) + blkdata]; } catch (Exception x) { resizeTable(blkid); return blank; } } public static BlockTransparency getTransparency(int blkid) { try { return transp[blkid]; } catch (Exception x) { resizeTable(blkid); return BlockTransparency.OPAQUE; } } private static void remapTexture(int id, int srcid) { for(int i = 0; i < 16; i++) { texmaps[(id<<4)+i] = texmaps[(srcid<<4)+i]; } } } /** * Texture map - used for accumulation of textures from different sources, keyed by lookup value */ public static class TextureMap { private Map<Integer, Integer> key_to_index = new HashMap<Integer, Integer>(); private List<Integer> texture_ids = new ArrayList<Integer>(); private List<Integer> blockids = new ArrayList<Integer>(); private int databits = 0; private BlockTransparency trans = BlockTransparency.OPAQUE; private boolean userender = false; private int colorMult = 0; private CustomColorMultiplier custColorMult = null; private String blockset; public int addTextureByKey(int key, int textureid) { int off = texture_ids.size(); /* Next index in array is texture index */ texture_ids.add(textureid); /* Add texture ID to list */ key_to_index.put(key, off); /* Add texture index to lookup by key */ return off; } } private static HashMap<String, TextureMap> textmap_by_id = new HashMap<String, TextureMap>(); /** * Add texture to texture map */ private static int addTextureByKey(String id, int key, int textureid) { TextureMap idx = textmap_by_id.get(id); if(idx == null) { /* Add empty one, if not found */ idx = new TextureMap(); textmap_by_id.put(id, idx); } return idx.addTextureByKey(key, textureid); } /** * Add settings for texture map */ private static void addTextureIndex(String id, List<Integer> blockids, int databits, BlockTransparency trans, boolean userender, int colorMult, CustomColorMultiplier custColorMult, String blockset) { TextureMap idx = textmap_by_id.get(id); if(idx == null) { /* Add empty one, if not found */ idx = new TextureMap(); textmap_by_id.put(id, idx); } idx.blockids = blockids; idx.databits = databits; idx.trans = trans; idx.userender = userender; idx.colorMult = colorMult; idx.custColorMult = custColorMult; } /** * Finish processing of texture indexes - add to texture maps */ private static void processTextureMaps() { for(TextureMap ti : textmap_by_id.values()) { if(ti.blockids.isEmpty()) continue; int[] txtids = new int[ti.texture_ids.size()]; for(int i = 0; i < txtids.length; i++) { txtids[i] = ti.texture_ids.get(i).intValue(); } HDTextureMap map = new HDTextureMap(ti.blockids, ti.databits, txtids, null, ti.trans, ti.userender, ti.colorMult, ti.custColorMult, ti.blockset, true); map.addToTable(); } } /** * Get index of texture in texture map */ public static int getTextureIndexFromTextureMap(String id, int key) { int idx = -1; TextureMap map = textmap_by_id.get(id); if(map != null) { Integer txtidx = map.key_to_index.get(key); 
if(txtidx != null) { idx = txtidx.intValue(); } } return idx; } /* * Get count of textures in given texture map */ public static int getTextureMapLength(String id) { TextureMap map = textmap_by_id.get(id); if(map != null) { return map.texture_ids.size(); } return -1; } /** Get or load texture pack */ public static TexturePack getTexturePack(DynmapCore core, String tpname) { synchronized(packlock) { TexturePack tp = packs.get(tpname); if(tp != null) return tp; try { tp = new TexturePack(core, tpname); /* Attempt to load pack */ packs.put(tpname, tp); return tp; } catch (FileNotFoundException fnfx) { Log.severe("Error loading texture pack '" + tpname + "' - not found"); } return null; } } /** * Constructor for texture pack, by name */ private TexturePack(DynmapCore core, String tpname) throws FileNotFoundException { File texturedir = getTexturePackDirectory(core); /* Set up for enough files */ imgs = new LoadedImage[IMG_CNT + addonfiles.size()]; // Get texture pack File f = new File(texturedir, tpname); // Build loader TexturePackLoader tpl = new TexturePackLoader(f); InputStream is = null; try { /* Load CTM support, if enabled */ if(core.isCTMSupportEnabled()) { ctm = new CTMTexturePack(tpl, this, core); if(ctm.isValid() == false) { ctm = null; } } /* Load custom colors support, if enabled */ if(core.isCustomColorsSupportEnabled()) { is = tpl.openTPResource("color.properties"); Properties p; if (is != null) { p = new Properties(); try { p.load(is); } finally { tpl.closeResource(is); } processCustomColors(p); } } /* Loop through dynamic files */ for(int i = 0; i < addonfiles.size(); i++) { DynamicTileFile dtf = addonfiles.get(i); is = tpl.openTPResource(dtf.filename); try { if(dtf.format == TileFileFormat.BIOME) loadBiomeShadingImage(is, i+IMG_CNT); /* Load image file */ else loadImage(is, i+IMG_CNT); /* Load image file */ } finally { tpl.closeResource(is); } } /* Find and load terrain.png */ is = tpl.openTPResource(TERRAIN_PNG); /* Try to find terrain.png */ if (is != null) { loadTerrainPNG(is); tpl.closeResource(is); } /* Try to find and load misc/grasscolor.png */ is = tpl.openTPResource(GRASSCOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_GRASSCOLOR); tpl.closeResource(is); } /* Try to find and load misc/foliagecolor.png */ is = tpl.openTPResource(FOLIAGECOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_FOLIAGECOLOR); tpl.closeResource(is); } /* Try to find and load misc/swampgrasscolor.png */ is = tpl.openTPResource(SWAMPGRASSCOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_SWAMPGRASSCOLOR); tpl.closeResource(is); } /* Try to find and load misc/swampfoliagecolor.png */ is = tpl.openTPResource(SWAMPFOLIAGECOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_SWAMPFOLIAGECOLOR); tpl.closeResource(is); } /* Try to find and load misc/watercolor.png */ is = tpl.openTPResource(WATERCOLORX_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_WATERCOLORX); tpl.closeResource(is); } /* Optional files - process if they exist */ is = tpl.openTPResource(CUSTOMLAVASTILL_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMLAVASTILL_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMLAVASTILL); tpl.closeResource(is); patchTextureWithImage(IMG_CUSTOMLAVASTILL, TILEINDEX_STATIONARYLAVA); patchTextureWithImage(IMG_CUSTOMLAVASTILL, TILEINDEX_MOVINGLAVA); } is = tpl.openTPResource(CUSTOMLAVAFLOWING_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMLAVAFLOWING_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMLAVAMOVING); tpl.closeResource(is); 
patchTextureWithImage(IMG_CUSTOMLAVAMOVING, TILEINDEX_MOVINGLAVA); } is = tpl.openTPResource(CUSTOMWATERSTILL_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMWATERSTILL_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMWATERSTILL); tpl.closeResource(is); patchTextureWithImage(IMG_CUSTOMWATERSTILL, TILEINDEX_STATIONARYWATER); patchTextureWithImage(IMG_CUSTOMWATERSTILL, TILEINDEX_MOVINGWATER); } is = tpl.openTPResource(CUSTOMWATERFLOWING_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMWATERFLOWING_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMWATERMOVING); tpl.closeResource(is); patchTextureWithImage(IMG_CUSTOMWATERMOVING, TILEINDEX_MOVINGWATER); } /* Loop through dynamic files */ for(int i = 0; i < addonfiles.size(); i++) { DynamicTileFile dtf = addonfiles.get(i); processDynamicImage(i, dtf.format); } } catch (IOException iox) { Log.severe("Error loading texture pack", iox); } finally { if (is != null) { try { is.close(); } catch (IOException iox) {} is = null; } tpl.close(); } } /** * Copy subimage from portions of given image * @param img_id - image ID of raw image * @param from_x - top-left X * @param from_y - top-left Y * @param to_x - dest topleft * @param to_y - dest topleft * @param width - width to copy * @param height - height to copy * @param dest_argb - destination tile buffer * @param dest_width - width of destination tile buffer */ private void copySubimageFromImage(int img_id, int from_x, int from_y, int to_x, int to_y, int width, int height, int[] dest_argb, int dest_width) { for(int h = 0; h < height; h++) { System.arraycopy(imgs[img_id].argb, (h+from_y)*imgs[img_id].width + from_x, dest_argb, dest_width*(h+to_y) + to_x, width); } } private enum HandlePos { CENTER, LEFT, RIGHT, NONE, LEFTFRONT, RIGHTFRONT }; /** * Make chest side image (based on chest and largechest layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 64 high) * @param width - width to copy (scaled based on 64 high) * @param dest_x - destination X (scaled based on 64 high) * @param handlepos - 0=middle,1=leftedge,2=rightedge */ private void makeChestSideImage(int img_id, int dest_idx, int src_x, int width, int dest_x, HandlePos handlepos) { if(dest_idx <= 0) return; int mult = imgs[img_id].height / 64; /* Nominal height for chest images is 64 */ int[] tile = new int[16 * 16 * mult * mult]; /* Make image */ /* Copy top part */ copySubimageFromImage(img_id, src_x * mult, 14 * mult, dest_x * mult, 2 * mult, width * mult, 5 * mult, tile, 16 * mult); /* Copy bottom part */ copySubimageFromImage(img_id, src_x * mult, 34 * mult, dest_x * mult, 7 * mult, width * mult, 9 * mult, tile, 16 * mult); /* Handle the handle image */ if(handlepos == HandlePos.CENTER) { /* Middle */ copySubimageFromImage(img_id, 1 * mult, 1 * mult, 7 * mult, 4 * mult, 2 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.LEFT) { /* left edge */ copySubimageFromImage(img_id, 3 * mult, 1 * mult, 0 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.LEFTFRONT) { /* left edge - front of handle */ copySubimageFromImage(img_id, 2 * mult, 1 * mult, 0 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.RIGHT) { /* Right */ copySubimageFromImage(img_id, 0 * mult, 1 * mult, 15 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.RIGHTFRONT) { /* Right - front of handle */ copySubimageFromImage(img_id, 1 *
mult, 1 * mult, 15 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(16*mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } /** * Make chest top/bottom image (based on chest and largechest layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 64 high) * @param src_y - starting Y of source (scaled based on 64 high) * @param width - width to copy (scaled based on 64 high) * @param dest_x - destination X (scaled based on 64 high) * @param handlepos - 0=middle,1=left-edge (righttop),2=right-edge (lefttop) */ private void makeChestTopBottomImage(int img_id, int dest_idx, int src_x, int src_y, int width, int dest_x, HandlePos handlepos) { if(dest_idx <= 0) return; int mult = imgs[img_id].height / 64; /* Nominal height for chest images is 64 */ int[] tile = new int[16 * 16 * mult * mult]; /* Make image */ copySubimageFromImage(img_id, src_x * mult, src_y * mult, dest_x * mult, 1 * mult, width * mult, 14 * mult, tile, 16 * mult); /* Handle the handle image */ if(handlepos == HandlePos.CENTER) { /* Middle */ copySubimageFromImage(img_id, 1 * mult, 0, 7 * mult, 15 * mult, 2 * mult, 1 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.LEFT) { /* left edge */ copySubimageFromImage(img_id, 2 * mult, 0, 0 * mult, 15 * mult, 1 * mult, 1 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.RIGHT) { /* Right */ copySubimageFromImage(img_id, 1 * mult, 0, 15 * mult, 15 * mult, 1 * mult, 1 * mult, tile, 16 * mult); } /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(16*mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } /** * Patch tiles based on image with chest-style layout */ private void patchChestImages(int img_id, int tile_top, int tile_bottom, int tile_front, int tile_back, int tile_left, int tile_right) { makeChestSideImage(img_id, tile_front, 14, 14, 1, HandlePos.CENTER); makeChestSideImage(img_id, tile_back, 42, 14, 1, HandlePos.NONE); makeChestSideImage(img_id, tile_left, 0, 14, 1, HandlePos.RIGHT); makeChestSideImage(img_id, tile_right, 28, 14, 1, HandlePos.LEFT); makeChestTopBottomImage(img_id, tile_top, 14, 0, 14, 1, HandlePos.CENTER); makeChestTopBottomImage(img_id, tile_bottom, 28, 19, 14, 1, HandlePos.CENTER); } /** * Patch tiles based on image with large-chest-style layout */ private void patchLargeChestImages(int img_id, int tile_topright, int tile_topleft, int tile_bottomright, int tile_bottomleft, int tile_right, int tile_left, int tile_frontright, int tile_frontleft, int tile_backright, int tile_backleft) { makeChestSideImage(img_id, tile_frontleft, 14, 15, 1, HandlePos.RIGHTFRONT); makeChestSideImage(img_id, tile_frontright, 29, 15, 0, HandlePos.LEFTFRONT); makeChestSideImage(img_id, tile_left, 0, 14, 1, HandlePos.RIGHT); makeChestSideImage(img_id, tile_right, 44, 14, 1, HandlePos.LEFT); makeChestSideImage(img_id, tile_backright, 58, 15, 1, HandlePos.NONE); makeChestSideImage(img_id, tile_backleft, 73, 15, 0, HandlePos.NONE); makeChestTopBottomImage(img_id, tile_topleft, 14, 0, 15, 1, HandlePos.RIGHT); makeChestTopBottomImage(img_id, tile_topright, 29, 0, 15, 0, HandlePos.LEFT); makeChestTopBottomImage(img_id, tile_bottomleft, 34, 19, 15, 1, HandlePos.RIGHT); makeChestTopBottomImage(img_id, tile_bottomright, 49, 19, 15, 0, HandlePos.LEFT); } /** * Make 
sign image (based on sign layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 32 high) * @param src_y - starting Y of source (scaled based on 32 high) * @param width - width to copy (scaled based on 32 high) * @param height - height to copy (scaled based on 32 high) */ private void makeSignImage(int img_id, int dest_idx, int src_x, int src_y, int width, int height) { int mult = imgs[img_id].height / 32; /* Nominal height for sign images is 32 */ int[] tile = new int[24 * 24 * mult * mult]; /* Make image (all are 24x24) */ copySubimageFromImage(img_id, src_x * mult, src_y * mult, 0, (24-height)*mult, width * mult, height * mult, tile, 24 * mult); /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(24*mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } private void patchSignImages(int img, int sign_front, int sign_back, int sign_top, int sign_bottom, int sign_left, int sign_right, int post_front, int post_back, int post_left, int post_right) { /* Load images at lower left corner of each tile */ makeSignImage(img, sign_front, 2, 2, 24, 12); makeSignImage(img, sign_back, 28, 2, 24, 12); makeSignImage(img, sign_top, 2, 0, 24, 2); makeSignImage(img, sign_left, 0, 2, 2, 12); makeSignImage(img, sign_right, 26, 2, 2, 12); makeSignImage(img, sign_bottom, 26, 0, 24, 2); makeSignImage(img, post_front, 0, 16, 2, 14); makeSignImage(img, post_right, 2, 16, 2, 14); makeSignImage(img, post_back, 4, 16, 2, 14); makeSignImage(img, post_left, 6, 16, 2, 14); } /** * Make face image (based on skin layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 32 high) * @param src_y - starting Y of source (scaled based on 32 high) */ private void makeFaceImage(int img_id, int dest_idx, int src_x, int src_y) { int mult = imgs[img_id].width / 64; /* Nominal height for skin images is 32 */ int[] tile = new int[8 * 8 * mult * mult]; /* Make image (all are 8x8) */ copySubimageFromImage(img_id, src_x * mult, src_y * mult, 0, 0, 8 * mult, 8 * mult, tile, 8 * mult); /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(8 * mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } private void patchSkinImages(int img, int face_front, int face_left, int face_right, int face_back, int face_top, int face_bottom) { makeFaceImage(img, face_front, 8, 8); makeFaceImage(img, face_left, 16, 8); makeFaceImage(img, face_right, 0, 8); makeFaceImage(img, face_back, 24, 8); makeFaceImage(img, face_top, 8, 0); makeFaceImage(img, face_bottom, 16, 0); } private void patchCustomImages(int img_id, int[] imgids, List<CustomTileRec> recs, int xcnt, int ycnt) { int mult = imgs[img_id].height / (ycnt * 16); /* Compute scale based on nominal tile count vertically (ycnt * 16) */ for(int i = 0; i < imgids.length; i++) { if(imgids[i] <= 0) continue; CustomTileRec ctr = recs.get(i); if(ctr == null) continue; int[] tile = new int[16 * 16 * mult * mult]; /* Make image */ copySubimageFromImage(img_id, ctr.srcx * mult, ctr.srcy * mult, ctr.targetx * mult, ctr.targety * mult, ctr.width * mult, ctr.height * mult, tile, 16 * mult); /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(16*mult, native_scale, tile, new_argb); terrain_argb[imgids[i]] = new_argb; } } 
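/* (Added illustration, not in the original source.) The public scaler
 * defined further below can be used stand-alone; a hypothetical caller
 * resampling a 32x32 ARGB tile down to 16x16:
 *   int[] src = new int[32 * 32]; // filled from some image
 *   int[] dst = new int[16 * 16];
 *   TexturePack.scaleTerrainPNGSubImage(32, 16, src, dst);
 * Channel sums are alpha-weighted, so fully transparent source pixels
 * contribute no color to the result. */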
/* Copy texture pack */ private TexturePack(TexturePack tp) { this.terrain_argb = new int[tp.terrain_argb.length][]; System.arraycopy(tp.terrain_argb, 0, this.terrain_argb, 0, this.terrain_argb.length); this.native_scale = tp.native_scale; this.water_toned_op = tp.water_toned_op; this.ctm = tp.ctm; this.imgs = tp.imgs; this.hasBlockColoring = tp.hasBlockColoring; this.blockColoring = tp.blockColoring; } /* Load terrain.png */ private void loadTerrainPNG(InputStream is) throws IOException { int i, j; /* Load image */ ImageIO.setUseCache(false); BufferedImage img = ImageIO.read(is); if(img == null) { throw new FileNotFoundException(); } terrain_argb = new int[TILETABLE_LEN][]; int[] blank; /* If we're using pre 1.5 terrain.png */ if(img.getWidth() >= 256) { native_scale = img.getWidth() / 16; blank = new int[native_scale*native_scale]; for(i = 0; i < 256; i++) { terrain_argb[i] = new int[native_scale*native_scale]; img.getRGB((i & 0xF)*native_scale, (i>>4)*native_scale, native_scale, native_scale, terrain_argb[i], 0, native_scale); } /* Now, load extra scaled images */ for(i = 256; i < terrain_map.length; i++) { terrain_argb[i] = blank; String fn = getBlockFileName(i); if (fn == null) continue; DynamicTileFile dtf = addonfilesbyname.get(fn); if (dtf == null) continue; LoadedImage li = imgs[dtf.idx + IMG_CNT]; if(li != null) { terrain_argb[i] = new int[native_scale * native_scale]; scaleTerrainPNGSubImage(li.width, native_scale, li.argb, terrain_argb[i]); } } } else { /* Else, use v1.5 tile files */ native_scale = 16; /* Loop through textures - find biggest one */ for(i = 0; i < terrain_map.length; i++) { String fn = getBlockFileName(i); if (fn == null) continue; DynamicTileFile dtf = addonfilesbyname.get(fn); if (dtf == null) continue; LoadedImage li = imgs[dtf.idx+IMG_CNT]; if(li != null) { if(native_scale < li.width) native_scale = li.width; } } blank = new int[native_scale*native_scale]; /* Now, load scaled images */ for(i = 0; i < terrain_map.length; i++) { terrain_argb[i] = blank; String fn = getBlockFileName(i); if (fn == null) continue; DynamicTileFile dtf = addonfilesbyname.get(fn); if (dtf == null) continue; LoadedImage li = imgs[dtf.idx + IMG_CNT]; if(li != null) { terrain_argb[i] = new int[native_scale * native_scale]; scaleTerrainPNGSubImage(li.width, native_scale, li.argb, terrain_argb[i]); } } } for(i = terrain_map.length; i < TILETABLE_LEN; i++) { terrain_argb[i] = blank; } /* Now, build redstone textures with active wire color (since we're not messing with that) */ Color tc = new Color(); for(i = 0; i < native_scale*native_scale; i++) { if(terrain_argb[TILEINDEX_REDSTONE_NSEW_TONE][i] != 0) { /* Overlay NSEW redstone texture with toned wire color */ tc.setARGB(terrain_argb[TILEINDEX_REDSTONE_NSEW_TONE][i]); tc.blendColor(0xFFC00000); /* Blend in red */ terrain_argb[TILEINDEX_REDSTONE_NSEW][i] = tc.getARGB(); } if(terrain_argb[TILEINDEX_REDSTONE_EW_TONE][i] != 0) { /* Overlay NSEW redstone texture with toned wire color */ tc.setARGB(terrain_argb[TILEINDEX_REDSTONE_EW_TONE][i]); tc.blendColor(0xFFC00000); /* Blend in red */ terrain_argb[TILEINDEX_REDSTONE_EW][i] = tc.getARGB(); } } /* Build extended piston side texture - take top 1/4 of piston side, use to make piston extension */ terrain_argb[TILEINDEX_PISTONEXTSIDE] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_PISTONSIDE], 0, terrain_argb[TILEINDEX_PISTONEXTSIDE], 0, native_scale * native_scale / 4); for(i = 0; i < native_scale/4; i++) { for(j = 0; j < (3*native_scale/4); j++) { 
terrain_argb[TILEINDEX_PISTONEXTSIDE][native_scale*(native_scale/4 + j) + (3*native_scale/8 + i)] = terrain_argb[TILEINDEX_PISTONSIDE][native_scale*i + j]; } } /* Build piston side while extended (cut off top 1/4, replace with rotated top for extension */ terrain_argb[TILEINDEX_PISTONSIDE_EXT] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_PISTONSIDE], native_scale*native_scale/4, terrain_argb[TILEINDEX_PISTONSIDE_EXT], native_scale*native_scale/4, 3 * native_scale * native_scale / 4); /* Copy bottom 3/4 */ for(i = 0; i < native_scale/4; i++) { for(j = 3*native_scale/4; j < native_scale; j++) { terrain_argb[TILEINDEX_PISTONSIDE_EXT][native_scale*(j - 3*native_scale/4) + (3*native_scale/8 + i)] = terrain_argb[TILEINDEX_PISTONSIDE][native_scale*i + j]; } } /* Build glass pane top in NSEW config (we use model to clip it) */ terrain_argb[TILEINDEX_PANETOP_X] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_GLASSPANETOP], 0, terrain_argb[TILEINDEX_PANETOP_X], 0, native_scale*native_scale); for(i = native_scale*7/16; i < native_scale*9/16; i++) { for(j = 0; j < native_scale; j++) { terrain_argb[TILEINDEX_PANETOP_X][native_scale*i + j] = terrain_argb[TILEINDEX_PANETOP_X][native_scale*j + i]; } } /* Build air frame with eye overlay */ terrain_argb[TILEINDEX_AIRFRAME_EYE] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_AIRFRAME], 0, terrain_argb[TILEINDEX_AIRFRAME_EYE], 0, native_scale*native_scale); for(i = native_scale/4; i < native_scale*3/4; i++) { for(j = native_scale/4; j < native_scale*3/4; j++) { terrain_argb[TILEINDEX_AIRFRAME_EYE][native_scale*i + j] = terrain_argb[TILEINDEX_EYEOFENDER][native_scale*i + j]; } } img.flush(); } /* Load image into image array */ private void loadImage(InputStream is, int idx) throws IOException { BufferedImage img = null; /* Load image */ if(is != null) { ImageIO.setUseCache(false); img = ImageIO.read(is); if(img == null) { throw new FileNotFoundException(); } } if(idx >= imgs.length) { LoadedImage[] newimgs = new LoadedImage[idx+1]; System.arraycopy(imgs, 0, newimgs, 0, imgs.length); imgs = newimgs; } imgs[idx] = new LoadedImage(); if (img != null) { imgs[idx].width = img.getWidth(); imgs[idx].height = img.getHeight(); imgs[idx].argb = new int[imgs[idx].width * imgs[idx].height]; img.getRGB(0, 0, imgs[idx].width, imgs[idx].height, imgs[idx].argb, 0, imgs[idx].width); img.flush(); } else { imgs[idx].width = 16; imgs[idx].height = 16; imgs[idx].argb = new int[imgs[idx].width * imgs[idx].height]; } } /* Process dynamic texture files, and patch into terrain_argb */ private void processDynamicImage(int idx, TileFileFormat format) { DynamicTileFile dtf = addonfiles.get(idx); /* Get tile file definition */ LoadedImage li = imgs[idx+IMG_CNT]; if (li == null) return; switch(format) { case GRID: /* If grid format tile file */ int dim = li.width / dtf.tilecnt_x; /* Dimension of each tile */ int old_argb[] = new int[dim*dim]; for(int x = 0; x < dtf.tilecnt_x; x++) { for(int y = 0; y < dtf.tilecnt_y; y++) { int tileidx = dtf.tile_to_dyntile[y*dtf.tilecnt_x + x]; if (tileidx < 0) continue; if((tileidx >= terrain_map.length) || (terrain_map[tileidx] == null)) { /* dynamic ID? 
*/ /* Copy source tile */ for(int j = 0; j < dim; j++) { System.arraycopy(li.argb, (y*dim+j)*li.width + (x*dim), old_argb, j*dim, dim); } /* Rescale to match rest of terrain PNG */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(dim, native_scale, old_argb, new_argb); terrain_argb[tileidx] = new_argb; } } } break; case CHEST: patchChestImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_CHEST_TOP], dtf.tile_to_dyntile[TILEINDEX_CHEST_BOTTOM], dtf.tile_to_dyntile[TILEINDEX_CHEST_FRONT], dtf.tile_to_dyntile[TILEINDEX_CHEST_BACK], dtf.tile_to_dyntile[TILEINDEX_CHEST_LEFT], dtf.tile_to_dyntile[TILEINDEX_CHEST_RIGHT]); break; case BIGCHEST: patchLargeChestImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_TOPRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_TOPLEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BOTTOMRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BOTTOMLEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_RIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_LEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_FRONTRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_FRONTLEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BACKRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BACKLEFT]); break; case SIGN: patchSignImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_SIGN_FRONT], dtf.tile_to_dyntile[TILEINDEX_SIGN_BACK], dtf.tile_to_dyntile[TILEINDEX_SIGN_TOP], dtf.tile_to_dyntile[TILEINDEX_SIGN_BOTTOM], dtf.tile_to_dyntile[TILEINDEX_SIGN_LEFTSIDE], dtf.tile_to_dyntile[TILEINDEX_SIGN_RIGHTSIDE], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTFRONT], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTBACK], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTLEFT], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTRIGHT]); break; case SKIN: patchSkinImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_SKIN_FACEFRONT], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACELEFT], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACERIGHT], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACEBACK], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACETOP], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACEBOTTOM]); break; case CUSTOM: patchCustomImages(idx+IMG_CNT, dtf.tile_to_dyntile, dtf.cust, dtf.tilecnt_x, dtf.tilecnt_y); break; case TILESET: // TODO break; default: break; } } /* Load biome shading image into image array */ private void loadBiomeShadingImage(InputStream is, int idx) throws IOException { loadImage(is, idx); /* Get image */ LoadedImage li = imgs[idx]; if (li.width != 256) { /* Required to be 256 x 256 */ int[] scaled = new int[256*256]; scaleTerrainPNGSubImage(li.width, 256, li.argb, scaled); li.argb = scaled; li.width = 256; li.height = 256; } /* Get trivial color for biome-shading image */ int clr = li.argb[li.height*li.width*3/4 + li.width/2]; boolean same = true; for(int j = 0; same && (j < li.height); j++) { for(int i = 0; same && (i <= j); i++) { if(li.argb[li.width*j+i] != clr) same = false; } } /* All the same - no biome lookup needed */ if(same) { li.trivial_color = clr; } else { /* Else, calculate color average for lower left quadrant */ int[] clr_scale = new int[4]; scaleTerrainPNGSubImage(li.width, 2, li.argb, clr_scale); li.trivial_color = clr_scale[2]; } } /* Patch image into texture table */ private void patchTextureWithImage(int image_idx, int block_idx) { /* Now, patch in to block table */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(imgs[image_idx].width, native_scale, imgs[image_idx].argb, new_argb); terrain_argb[block_idx] = new_argb; } /* Get texture pack directory */ private static File getTexturePackDirectory(DynmapCore core) { return 
new File(core.getDataFolder(), "texturepacks"); } /** * Resample terrain pack for given scale, and return copy using that scale */ public TexturePack resampleTexturePack(int scale) { synchronized(scaledlock) { if(scaled_textures == null) scaled_textures = new HashMap<Integer, TexturePack>(); TexturePack stp = scaled_textures.get(scale); if(stp != null) return stp; stp = new TexturePack(this); /* Make copy */ /* Scale terrain.png, if needed */ if(stp.native_scale != scale) { stp.native_scale = scale; scaleTerrainPNG(stp); } /* Remember it */ scaled_textures.put(scale, stp); return stp; } } /** * Scale out terrain_argb into the terrain_argb of the provided destination, matching the scale of that destination * @param tp */ private void scaleTerrainPNG(TexturePack tp) { tp.terrain_argb = new int[terrain_argb.length][]; /* Terrain.png is 16x16 array of images : process one at a time */ for(int idx = 0; idx < terrain_argb.length; idx++) { tp.terrain_argb[idx] = new int[tp.native_scale*tp.native_scale]; scaleTerrainPNGSubImage(native_scale, tp.native_scale, terrain_argb[idx], tp.terrain_argb[idx]); } /* Special case - some textures are used as masks - need pure alpha (00 or FF) */ makeAlphaPure(tp.terrain_argb[TILEINDEX_GRASSMASK]); /* Grass side mask */ } public static void scaleTerrainPNGSubImage(int srcscale, int destscale, int[] src_argb, int[] dest_argb) { int nativeres = srcscale; int res = destscale; Color c = new Color(); /* Same size, so just copy */ if(res == nativeres) { System.arraycopy(src_argb, 0, dest_argb, 0, dest_argb.length); } /* If we're scaling larger source pixels into smaller pixels, each destination pixel * receives input from 1 or 2 source pixels on each axis */ else if(res > nativeres) { int weights[] = new int[res]; int offsets[] = new int[res]; /* LCM of resolutions is used as length of line (res * nativeres) * Each native block is (res) long, each scaled block is (nativeres) long * Each scaled block overlaps 1 or 2 native blocks: starting with native block 'offsets[]' with * 'weights[]' of its (res) width in the first, and the rest in the second */ for(int v = 0, idx = 0; v < res*nativeres; v += nativeres, idx++) { offsets[idx] = (v/res); /* Get index of the first native block we draw from */ if((v+nativeres-1)/res == offsets[idx]) { /* If scaled block ends in same native block */ weights[idx] = nativeres; } else { /* Else, see how much is in first one */ weights[idx] = (offsets[idx]*res + res) - v; } } /* Now, use weights and indices to fill in scaled map */ for(int y = 0; y < res; y++) { int ind_y = offsets[y]; int wgt_y = weights[y]; for(int x = 0; x < res; x++) { int ind_x = offsets[x]; int wgt_x = weights[x]; double accum_red = 0; double accum_green = 0; double accum_blue = 0; double accum_alpha = 0; for(int xx = 0; xx < 2; xx++) { int wx = (xx==0)?wgt_x:(nativeres-wgt_x); if(wx == 0) continue; for(int yy = 0; yy < 2; yy++) { int wy = (yy==0)?wgt_y:(nativeres-wgt_y); if(wy == 0) continue; /* Accumulate */ c.setARGB(src_argb[(ind_y+yy)*nativeres + ind_x + xx]); int w = wx * wy; double a = (double)w * (double)c.getAlpha(); accum_red += c.getRed() * a; accum_green += c.getGreen() * a; accum_blue += c.getBlue() * a; accum_alpha += a; } } double newalpha = accum_alpha; if(newalpha == 0.0) newalpha = 1.0; /* Generate weighted components into color */ c.setRGBA((int)(accum_red / newalpha), (int)(accum_green / newalpha), (int)(accum_blue / newalpha), (int)(accum_alpha / (nativeres*nativeres))); dest_argb[(y*res) + x] = c.getARGB(); } } } else { /* nativeres > res */ int
weights[] = new int[nativeres]; int offsets[] = new int[nativeres]; /* LCM of resolutions is used as length of line (res * nativeres) * Each native block is (res) long, each scaled block is (nativeres) long * Each native block overlaps 1 or 2 scaled blocks: starting with scaled block 'offsets[]' with * 'weights[]' of its (res) width in the first, and the rest in the second */ for(int v = 0, idx = 0; v < res*nativeres; v += res, idx++) { offsets[idx] = (v/nativeres); /* Get index of the first scaled block we draw to */ if((v+res-1)/nativeres == offsets[idx]) { /* If native block ends in same scaled block */ weights[idx] = res; } else { /* Else, see how much is in first one */ weights[idx] = (offsets[idx]*nativeres + nativeres) - v; } } double accum_red[] = new double[res*res]; double accum_green[] = new double[res*res]; double accum_blue[] = new double[res*res]; double accum_alpha[] = new double[res*res]; /* Now, use weights and indices to fill in scaled map */ for(int y = 0; y < nativeres; y++) { int ind_y = offsets[y]; int wgt_y = weights[y]; for(int x = 0; x < nativeres; x++) { int ind_x = offsets[x]; int wgt_x = weights[x]; c.setARGB(src_argb[(y*nativeres) + x]); for(int xx = 0; xx < 2; xx++) { int wx = (xx==0)?wgt_x:(res-wgt_x); if(wx == 0) continue; for(int yy = 0; yy < 2; yy++) { int wy = (yy==0)?wgt_y:(res-wgt_y); if(wy == 0) continue; double w = wx * wy; double a = w * c.getAlpha(); accum_red[(ind_y+yy)*res + (ind_x+xx)] += c.getRed() * a; accum_green[(ind_y+yy)*res + (ind_x+xx)] += c.getGreen() * a; accum_blue[(ind_y+yy)*res + (ind_x+xx)] += c.getBlue() * a; accum_alpha[(ind_y+yy)*res + (ind_x+xx)] += a; } } } } /* Produce normalized scaled values */ for(int y = 0; y < res; y++) { for(int x = 0; x < res; x++) { int off = (y*res) + x; double aa = accum_alpha[off]; if(aa == 0.0) aa = 1.0; c.setRGBA((int)(accum_red[off]/aa), (int)(accum_green[off]/aa), (int)(accum_blue[off]/aa), (int)(accum_alpha[off] / (nativeres*nativeres))); dest_argb[y*res + x] = c.getARGB(); } } } } private static void addFiles(List<String> tsfiles, List<String> txfiles, File dir, String path) { File[] listfiles = dir.listFiles(); if(listfiles == null) return; for(File f : listfiles) { String fn = f.getName(); if(fn.equals(".") || (fn.equals(".."))) continue; if(f.isFile()) { if(fn.endsWith("-texture.txt")) { txfiles.add(path + fn); } if(fn.endsWith("-tilesets.txt")) { tsfiles.add(path + fn); } } else if(f.isDirectory()) { addFiles(tsfiles, txfiles, f, path + f.getName() + "/"); } } } /** * Load texture pack mappings */ public static void loadTextureMapping(DynmapCore core, ConfigurationNode config) { File datadir = core.getDataFolder(); /* Start clean with texture packs - need to be loaded after mapping */ resetFiles(); /* Initialize map with blank map for all entries */ HDTextureMap.initializeTable(); /* Load block models */ InputStream in = TexturePack.class.getResourceAsStream("/texture.txt"); if(in != null) { loadTextureFile(in, "texture.txt", config, core, "core"); if(in != null) { try { in.close(); } catch (IOException x) {} in = null; } } else Log.severe("Error loading texture.txt"); File renderdir = new File(datadir, "renderdata"); ArrayList<String> tsfiles = new ArrayList<String>(); ArrayList<String> txfiles = new ArrayList<String>(); addFiles(tsfiles, txfiles, renderdir, ""); for(String fname : tsfiles) { File custom = new File(renderdir, fname); if(custom.canRead()) { try { in = new FileInputStream(custom); loadTileSetsFile(in, custom.getPath(), config, core, fname.substring(0, 
fname.indexOf("-tilesets.txt"))); } catch (IOException iox) { Log.severe("Error loading " + custom.getPath() + " - " + iox); } finally { if(in != null) { try { in.close(); } catch (IOException x) {} in = null; } } } } for(String fname : txfiles) { File custom = new File(renderdir, fname); if(custom.canRead()) { try { in = new FileInputStream(custom); loadTextureFile(in, custom.getPath(), config, core, fname.substring(0, fname.indexOf("-texture.txt"))); } catch (IOException iox) { Log.severe("Error loading " + custom.getPath() + " - " + iox); } finally { if(in != null) { try { in.close(); } catch (IOException x) {} in = null; } } } } /* Finish processing of texture maps */ processTextureMaps(); /* Check integrity of texture mappings versus models */ for(int blkiddata = 0; blkiddata < HDTextureMap.texmaps.length; blkiddata++) { int blkid = (blkiddata >> 4); int blkdata = blkiddata & 0xF; HDTextureMap tm = HDTextureMap.texmaps[blkiddata]; int cnt = HDBlockModels.getNeededTextureCount(blkid, blkdata); if(cnt > tm.faces.length){ Log.severe("Block ID " + blkid + ":" + blkdata + " - not enough textures for faces (" + cnt + " > " + tm.faces.length + ")"); int[] newfaces = new int[cnt]; System.arraycopy(tm.faces, 0, newfaces, 0, tm.faces.length); for(int i = tm.faces.length; i < cnt; i++) { newfaces[i] = TILEINDEX_BLANK; } } } } private static Integer getIntValue(Map<String,Integer> vars, String val) throws NumberFormatException { if(Character.isLetter(val.charAt(0))) { int off = val.indexOf('+'); int offset = 0; if (off > 0) { offset = Integer.valueOf(val.substring(off+1)); val = val.substring(0, off); } Integer v = vars.get(val); if(v == null) throw new NumberFormatException("invalid ID - " + val); if((offset != 0) && (v.intValue() > 0)) v = v.intValue() + offset; return v; } else { return Integer.valueOf(val); } } private static int parseTextureIndex(HashMap<String,Integer> filetoidx, int srctxtid, String val) throws NumberFormatException { int off = val.indexOf(':'); int txtid = -1; if(off > 0) { String txt = val.substring(off+1); if(filetoidx.containsKey(txt)) { srctxtid = filetoidx.get(txt); } else { throw new NumberFormatException("Unknown attribute: " + txt); } txtid = Integer.valueOf(val.substring(0, off)); } else { txtid = Integer.valueOf(val); } /* Shift function code from x1000 to x1000000 for internal processing */ int funcid = (txtid / COLORMOD_MULT_FILE); txtid = txtid - (COLORMOD_MULT_FILE * funcid); /* If we have source texture, need to map values to dynamic ids */ if((srctxtid >= 0) && (txtid >= 0)) { /* Map to assigned ID in global tile table: preserve modifier */ txtid =findOrAddDynamicTile(srctxtid, txtid); } return txtid + (COLORMOD_MULT_INTERNAL * funcid); } /** * Load texture pack mappings from tilesets.txt file */ private static void loadTileSetsFile(InputStream txtfile, String txtname, ConfigurationNode config, DynmapCore core, String blockset) { LineNumberReader rdr = null; DynamicTileFile tfile = null; try { String line; rdr = new LineNumberReader(new InputStreamReader(txtfile)); while((line = rdr.readLine()) != null) { if(line.startsWith("#")) { } else if(line.startsWith("tileset:")) { /* Start of tileset definition */ line = line.substring(line.indexOf(':')+1); int xdim = 16, ydim = 16; String fname = null; String setdir = null; String[] toks = line.split(","); for(String tok : toks) { String[] v = tok.split("="); if(v.length < 2) continue; if(v[0].equals("xcount")) { xdim = Integer.parseInt(v[1]); } else if(v[0].equals("ycount")) { ydim = Integer.parseInt(v[1]); } 
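/* (Added illustration, not in the original source.) A tilesets.txt
 * stanza this parser accepts - file and set names are hypothetical:
 *   tileset:xcount=16,ycount=16,setdir=myset,filename=textures/myset.png
 *   0,0-grass_top
 *   12-stone
 * The setdir/filename tokens are handled by the branches that follow;
 * the "x,y-name" and "index-name" lines are parsed further down. */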
else if(v[0].equals("setdir")) { setdir = v[1]; } else if(v[0].equals("filename")) { fname = v[1]; } } if ((fname != null) && (setdir != null)) { /* Register tile file */ int fid = findOrAddDynamicTileFile(fname, xdim, ydim, TileFileFormat.TILESET, new String[0]); tfile = addonfiles.get(fid); if (tfile == null) { Log.severe("Error registering tile set " + fname + " at " + rdr.getLineNumber() + " of " + txtname); return; } /* Initialize tile name map and set directory path */ tfile.tilenames = new String[tfile.tile_to_dyntile.length]; tfile.setdir = setdir; } else { Log.severe("Error defining tile set at " + rdr.getLineNumber() + " of " + txtname); return; } } else if(Character.isDigit(line.charAt(0))) { /* Starts with digit? tile mapping */ int split = line.indexOf('-'); /* Find first dash */ if(split < 0) continue; String id = line.substring(0, split).trim(); String name = line.substring(split+1).trim(); String[] coord = id.split(","); int idx = -1; if(coord.length == 2) { /* If x,y */ idx = (Integer.parseInt(coord[1]) * tfile.tilecnt_x) + Integer.parseInt(coord[0]); } else if(coord.length == 1) { /* Just index */ idx = Integer.parseInt(coord[0]); } if((idx >= 0) && (idx < tfile.tilenames.length)) { tfile.tilenames[idx] = name; } else { Log.severe("Bad tile index - line " + rdr.getLineNumber() + " of " + txtname); } } } } catch (IOException iox) { Log.severe("Error reading " + txtname + " - " + iox.toString()); } catch (NumberFormatException nfx) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + nfx.getMessage()); } finally { if(rdr != null) { try { rdr.close(); rdr = null; } catch (IOException e) { } } } } /** * Load texture pack mappings from texture.txt file */ private static void loadTextureFile(InputStream txtfile, String txtname, ConfigurationNode config, DynmapCore core, String blockset) { LineNumberReader rdr = null; int cnt = 0; HashMap<String,Integer> filetoidx = new HashMap<String,Integer>(); HashMap<String,Integer> varvals = new HashMap<String,Integer>(); boolean mod_cfg_needed = false; String modname = null; try { String line; rdr = new LineNumberReader(new InputStreamReader(txtfile)); while((line = rdr.readLine()) != null) { if(line.startsWith("block:")) { ArrayList<Integer> blkids = new ArrayList<Integer>(); int databits = -1; int srctxtid = -1; int faces[] = new int[] { TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK }; int txtidx[] = new int[] { -1, -1, -1, -1, -1, -1 }; byte layers[] = null; line = line.substring(6); BlockTransparency trans = BlockTransparency.OPAQUE; int colorMult = 0; boolean stdrot = false; // Legacy top/bottom rotation CustomColorMultiplier custColorMult = null; String[] args = line.split(","); for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; else if(av[0].equals("txtid")) { if(filetoidx.containsKey(av[1])) srctxtid = filetoidx.get(av[1]); else Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": bad texture " + av[1]); } } boolean userenderdata = false; for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; if(av[0].equals("id")) { blkids.add(getIntValue(varvals, av[1])); } else if(av[0].equals("data")) { if(databits < 0) databits = 0; if(av[1].equals("*")) databits = 0xFFFF; else databits |= (1 << getIntValue(varvals,av[1])); } else if(av[0].equals("top") || av[0].equals("y-") || av[0].equals("face1")) { faces[BlockStep.Y_MINUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, 
av[1]); } else if(av[0].equals("bottom") || av[0].equals("y+") || av[0].equals("face0")) { faces[BlockStep.Y_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("north") || av[0].equals("x+") || av[0].equals("face4")) { faces[BlockStep.X_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("south") || av[0].equals("x-") || av[0].equals("face5")) { faces[BlockStep.X_MINUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("west") || av[0].equals("z-") || av[0].equals("face3")) { faces[BlockStep.Z_MINUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("east") || av[0].equals("z+") || av[0].equals("face2")) { faces[BlockStep.Z_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("allfaces")) { int id = parseTextureIndex(filetoidx, srctxtid, av[1]); for(int i = 0; i < 6; i++) { faces[i] = id; } } else if(av[0].equals("allsides")) { int id = parseTextureIndex(filetoidx, srctxtid, av[1]); faces[BlockStep.X_PLUS.ordinal()] = id; faces[BlockStep.X_MINUS.ordinal()] = id; faces[BlockStep.Z_PLUS.ordinal()] = id; faces[BlockStep.Z_MINUS.ordinal()] = id; } else if(av[0].equals("topbottom")) { faces[BlockStep.Y_MINUS.ordinal()] = faces[BlockStep.Y_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].startsWith("patch")) { int patchid0, patchid1; String idrange = av[0].substring(5); String[] ids = idrange.split("-"); if(ids.length > 1) { patchid0 = Integer.parseInt(ids[0]); patchid1 = Integer.parseInt(ids[1]); } else { patchid0 = patchid1 = Integer.parseInt(ids[0]); } if((patchid0 < 0) || (patchid1 < patchid0)) { Log.severe("Texture mapping has invalid patch index - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname); return; } if(faces.length <= patchid1) { int[] newfaces = new int[patchid1+1]; Arrays.fill(newfaces, TILEINDEX_BLANK); System.arraycopy(faces, 0, newfaces, 0, faces.length); faces = newfaces; int[] newtxtidx = new int[patchid1+1]; Arrays.fill(newtxtidx, -1); System.arraycopy(txtidx, 0, newtxtidx, 0, txtidx.length); txtidx = newtxtidx; } int txtid = parseTextureIndex(filetoidx, srctxtid, av[1]); for(int i = patchid0; i <= patchid1; i++) { faces[i] = txtid; } } else if(av[0].equals("transparency")) { trans = BlockTransparency.valueOf(av[1]); if(trans == null) { trans = BlockTransparency.OPAQUE; Log.severe("Texture mapping has invalid transparency setting - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname); } /* For leaves, base on leaf transparency setting */ if(trans == BlockTransparency.LEAVES) { if(core.getLeafTransparency()) trans = BlockTransparency.TRANSPARENT; else trans = BlockTransparency.OPAQUE; } /* If no water lighting fix */ if((blkids.contains(8) || blkids.contains(9)) && (HDMapManager.waterlightingfix == false)) { trans = BlockTransparency.TRANSPARENT; /* Treat water as transparent if no fix */ } } else if(av[0].equals("userenderdata")) { userenderdata = av[1].equals("true"); } else if(av[0].equals("colorMult")) { colorMult = (int)Long.parseLong(av[1], 16); } else if(av[0].equals("custColorMult")) { try { Class<?> cls = Class.forName(av[1]); custColorMult = (CustomColorMultiplier)cls.newInstance(); } catch (Exception x) { Log.severe("Error loading custom color multiplier - " + av[1] + ": " + x.getMessage()); } } else if(av[0].equals("stdrot")) { stdrot = av[1].equals("true"); } } for(String a : args) { String[] av = a.split("="); if(av.length < 2) 
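/* layerN(-M)=K attributes chain overlay textures: if face N..M resolves to a transparent pixel at render time, readColor() falls through to face K (see the layered-texture loop in readColor below). */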
continue; if(av[0].startsWith("layer")) { if(layers == null) { layers = new byte[faces.length]; Arrays.fill(layers, (byte)-1); } String v[] = av[0].substring(5).split("-"); int id1, id2; id1 = id2 = Integer.parseInt(v[0]); if(v.length > 1) { id2 = Integer.parseInt(v[1]); } byte val = (byte)Integer.parseInt(av[1]); for(; id1 <= id2; id1++) { layers[id1] = val; } } } /* If no data bits, assume all */ if(databits < 0) databits = 0xFFFF; /* If we have everything, build block */ if(blkids.size() > 0) { HDTextureMap map = new HDTextureMap(blkids, databits, faces, layers, trans, userenderdata, colorMult, custColorMult, blockset, stdrot); map.addToTable(); cnt++; } else { Log.severe("Texture mapping missing required parameters = line " + rdr.getLineNumber() + " of " + txtname); } } else if(line.startsWith("addtotexturemap:")) { int srctxtid = -1; String mapid = null; line = line.substring(line.indexOf(':') + 1); String[] args = line.split(","); for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; else if(av[0].equals("txtid")) { if(filetoidx.containsKey(av[1])) srctxtid = filetoidx.get(av[1]); else Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); } else if(av[0].equals("mapid")) { mapid = av[1]; } } if(mapid != null) { for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; if(av[0].startsWith("key:")) { addTextureByKey(mapid, getIntValue(varvals, av[0].substring(4)), parseTextureIndex(filetoidx, srctxtid, av[1])); } } } else { Log.severe("Missing mapid - line " + rdr.getLineNumber() + " of " + txtname); } } else if(line.startsWith("texturemap:")) { ArrayList<Integer> blkids = new ArrayList<Integer>(); int databits = -1; String mapid = null; line = line.substring(line.indexOf(':') + 1); BlockTransparency trans = BlockTransparency.OPAQUE; int colorMult = 0; CustomColorMultiplier custColorMult = null; String[] args = line.split(","); boolean userenderdata = false; for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; if(av[0].equals("id")) { blkids.add(getIntValue(varvals, av[1])); } else if(av[0].equals("mapid")) { mapid = av[1]; } else if(av[0].equals("data")) { if(databits < 0) databits = 0; if(av[1].equals("*")) databits = 0xFFFF; else databits |= (1 << getIntValue(varvals,av[1])); } else if(av[0].equals("transparency")) { trans = BlockTransparency.valueOf(av[1]); if(trans == null) { trans = BlockTransparency.OPAQUE; Log.severe("Texture mapping has invalid transparency setting - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname); } /* For leaves, base on leaf transparency setting */ if(trans == BlockTransparency.LEAVES) { if(core.getLeafTransparency()) trans = BlockTransparency.TRANSPARENT; else trans = BlockTransparency.OPAQUE; } /* If no water lighting fix */ if((blkids.contains(8) || blkids.contains(9)) && (HDMapManager.waterlightingfix == false)) { trans = BlockTransparency.TRANSPARENT; /* Treat water as transparent if no fix */ } } else if(av[0].equals("userenderdata")) { userenderdata = av[1].equals("true"); } else if(av[0].equals("colorMult")) { colorMult = Integer.valueOf(av[1], 16); } else if(av[0].equals("custColorMult")) { try { Class<?> cls = Class.forName(av[1]); custColorMult = (CustomColorMultiplier)cls.newInstance(); } catch (Exception x) { Log.severe("Error loading custom color multiplier - " + av[1] + ": " + x.getMessage()); } } } /* If no data bits, assume all */ if(databits < 0) databits = 0xFFFF; /* If we have everything, build texture map */ if((blkids.size() 
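/* Hedged example (values invented) of the directives parsed here: "texturemap:id=35,mapid=wool,data=*" declares the map, and "addtotexturemap:mapid=wool,txtid=woolfile,key:0=0" attaches tile 0 of a registered texture file to lookup key 0. */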
> 0) && (mapid != null)) { addTextureIndex(mapid, blkids, databits, trans, userenderdata, colorMult, custColorMult, blockset); } else { Log.severe("Texture map missing required parameters = line " + rdr.getLineNumber() + " of " + txtname); } } else if(line.startsWith("texturefile:")) { line = line.substring(line.indexOf(':')+1); String[] args = line.split(","); int xdim = 16, ydim = 16; String fname = null; String id = null; TileFileFormat fmt = TileFileFormat.GRID; for(String arg : args) { String[] aval = arg.split("="); if(aval.length < 2) continue; if(aval[0].equals("id")) id = aval[1]; else if(aval[0].equals("filename")) fname = aval[1]; else if(aval[0].equals("xcount")) xdim = Integer.parseInt(aval[1]); else if(aval[0].equals("ycount")) ydim = Integer.parseInt(aval[1]); else if(aval[0].equals("format")) { fmt = TileFileFormat.valueOf(aval[1].toUpperCase()); if(fmt == null) { Log.severe("Invalid format type " + aval[1] + " - line " + rdr.getLineNumber() + " of " + txtname); return; } } } if((fname != null) && (id != null)) { /* Register the file */ int fid = findOrAddDynamicTileFile(fname, xdim, ydim, fmt, args); filetoidx.put(id, fid); /* Save lookup */ } else { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); return; } } else if(line.startsWith("#") || line.startsWith(";")) { } else if(line.startsWith("enabled:")) { /* Test if texture file is enabled */ line = line.substring(8).trim(); if(line.startsWith("true")) { /* We're enabled? */ /* Nothing to do - keep processing */ } else if(line.startsWith("false")) { /* Disabled */ return; /* Quit */ } /* If setting is not defined or false, quit */ else if(config.getBoolean(line, false) == false) { return; } else { Log.info(line + " textures enabled"); } } else if(line.startsWith("var:")) { /* Test if variable declaration */ line = line.substring(4).trim(); String args[] = line.split(","); for(int i = 0; i < args.length; i++) { String[] v = args[i].split("="); if(v.length < 2) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); return; } try { int val = Integer.valueOf(v[1]); /* Parse default value */ int parmval = config.getInteger(v[0], val); /* Read value, with applied default */ varvals.put(v[0], parmval); /* And save value */ } catch (NumberFormatException nfx) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + nfx.getMessage()); return; } } } else if(line.startsWith("cfgfile:")) { /* If config file */ File cfgfile = new File(line.substring(8).trim()); ForgeConfigFile cfg = new ForgeConfigFile(cfgfile); if(cfg.load()) { cfg.addBlockIDs(varvals); mod_cfg_needed = false; } } else if(line.startsWith("modname:")) { String[] names = line.substring(8).split(","); boolean found = false; for(String n : names) { if(core.getServer().isModLoaded(n.trim()) == true) { found = true; Log.info(n + " textures enabled"); mod_cfg_needed = true; modname = n.trim(); break; } } if(!found) return; } else if(line.startsWith("biome:")) { line = line.substring(6).trim(); String args[] = line.split(","); int id = 0; int grasscolormult = -1; int foliagecolormult = -1; int watercolormult = -1; double rain = -1.0; double tmp = -1.0; for(int i = 0; i < args.length; i++) { String[] v = args[i].split("="); if(v.length < 2) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); return; } if(v[0].equals("id")) { id = getIntValue(varvals, v[1]); } else if(v[0].equals("grassColorMult")) { grasscolormult = Integer.valueOf(v[1], 16); } else 
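/* Hedged example (values invented) of a biome override line: "biome:id=6,grassColorMult=4C763C,waterColorMult=E0FFAE,temp=0.8,rain=0.9" - the multipliers are hex RGB, temp/rain are doubles, and id may reference a declared var. */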
if(v[0].equals("foliageColorMult")) { foliagecolormult = Integer.valueOf(v[1], 16); } else if(v[0].equals("waterColorMult")) { watercolormult = Integer.valueOf(v[1], 16); } else if(v[0].equals("temp")) { tmp = Double.parseDouble(v[1]); } else if(v[0].equals("rain")) { rain = Double.parseDouble(v[1]); } } if(id > 0) { BiomeMap b = BiomeMap.byBiomeID(id); /* Find biome */ if(b == null) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + id); } else { if(foliagecolormult != -1) b.setFoliageColorMultiplier(foliagecolormult); if(grasscolormult != -1) b.setGrassColorMultiplier(grasscolormult); if(watercolormult != -1) b.setWaterColorMultiplier(watercolormult); if(tmp != -1.0) b.setTemperature(tmp); if(rain != -1.0) b.setRainfall(rain); } } } else if(line.startsWith("version:")) { line = line.substring(line.indexOf(':')+1); String mcver = core.getDynmapPluginPlatformVersion(); String[] split = line.split("-"); if(split.length == 1) { /* Only one */ if(!mcver.equals(split[0])) { // If not match return; } } else if(split.length == 2) { /* Two : range */ if( (split[0].equals("") || (split[0].compareTo(mcver) <= 0)) && (split[1].equals("") || (split[1].compareTo(mcver) >= 0))) { } else { return; } } else { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + line); } } } if(mod_cfg_needed) { Log.severe("Error loading configuration file for " + modname); } Log.verboseinfo("Loaded " + cnt + " texture mappings from " + txtname); } catch (IOException iox) { Log.severe("Error reading " + txtname + " - " + iox.toString()); } catch (NumberFormatException nfx) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + nfx.getMessage()); } finally { if(rdr != null) { try { rdr.close(); rdr = null; } catch (IOException e) { } } } } /* Process any ore hiding mappings */ public static void handleHideOres() { /* Now, fix mapping if we're hiding any ores */ if(MapManager.mapman.getHideOres()) { for(int i = 0; i < 256; i++) { int id = MapManager.mapman.getBlockIDAlias(i); if(id != i) { /* New mapping? 
*/ HDTextureMap.remapTexture(i, id); } } } } private static final int BLOCKID_GRASS = 2; private static final int BLOCKID_SNOW = 78; /** * Read color for given subblock coordinate, with given block id and data and face */ public final void readColor(final HDPerspectiveState ps, final MapIterator mapiter, final Color rslt, final int blkid, final int lastblocktype, final TexturePackHDShader.ShaderState ss) { int blkdata = ps.getBlockData(); HDTextureMap map = HDTextureMap.getMap(blkid, blkdata, ps.getBlockRenderData()); BlockStep laststep = ps.getLastBlockStep(); int patchid = ps.getTextureIndex(); /* See if patch index */ int textid; int faceindex; if(patchid >= 0) { faceindex = patchid; } else { faceindex = laststep.ordinal(); } textid = map.faces[faceindex]; if (ctm != null) { int mod = 0; if(textid >= COLORMOD_MULT_INTERNAL) { mod = (textid / COLORMOD_MULT_INTERNAL) * COLORMOD_MULT_INTERNAL; textid -= mod; } textid = mod + ctm.mapTexture(mapiter, blkid, blkdata, laststep, textid, ss); } readColor(ps, mapiter, rslt, blkid, lastblocktype, ss, blkdata, map, laststep, patchid, textid, map.stdrotate); if(map.layers != null) { /* If layered */ /* While transparent and more layers */ while(rslt.isTransparent() && (map.layers[faceindex] >= 0)) { faceindex = map.layers[faceindex]; textid = map.faces[faceindex]; readColor(ps, mapiter, rslt, blkid, lastblocktype, ss, blkdata, map, laststep, patchid, textid, map.stdrotate); } } } /** * Read color for given subblock coordinate, with given block id and data and face */ private final void readColor(final HDPerspectiveState ps, final MapIterator mapiter, final Color rslt, final int blkid, final int lastblocktype, final TexturePackHDShader.ShaderState ss, int blkdata, HDTextureMap map, BlockStep laststep, int patchid, int textid, boolean stdrot) { if(textid < 0) { rslt.setTransparent(); return; } int blkindex = indexByIDMeta(blkid, blkdata); boolean hasblockcoloring = ss.do_biome_shading && hasBlockColoring.get(blkindex); // Test if we have no texture modifications boolean simplemap = (textid < COLORMOD_MULT_INTERNAL) && (!hasblockcoloring); if (simplemap) { /* If simple mapping */ int[] texture = terrain_argb[textid]; /* Get texture coordinates (U=horizontal(left=0),V=vertical(top=0)) */ int u = 0, v = 0; /* If not patch, compute U and V */ if(patchid < 0) { int[] xyz = ps.getSubblockCoord(); switch(laststep) { case X_MINUS: /* South face: U = East (Z-), V = Down (Y-) */ u = native_scale-xyz[2]-1; v = native_scale-xyz[1]-1; break; case X_PLUS: /* North face: U = West (Z+), V = Down (Y-) */ u = xyz[2]; v = native_scale-xyz[1]-1; break; case Z_MINUS: /* West face: U = South (X+), V = Down (Y-) */ u = xyz[0]; v = native_scale-xyz[1]-1; break; case Z_PLUS: /* East face: U = North (X-), V = Down (Y-) */ u = native_scale-xyz[0]-1; v = native_scale-xyz[1]-1; break; case Y_MINUS: /* U = East(Z-), V = South(X+) */ if(stdrot) { u = xyz[0]; v = xyz[2]; } else { u = native_scale-xyz[2]-1; v = xyz[0]; } break; case Y_PLUS: if(stdrot) { u = native_scale-xyz[0]-1; v = xyz[2]; } else { u = xyz[2]; v = xyz[0]; } break; } } else { u = fastFloor(ps.getPatchU() * native_scale); v = native_scale - fastFloor(ps.getPatchV() * native_scale) - 1; } /* Read color from texture */ try { rslt.setARGB(texture[v*native_scale + u]); } catch(ArrayIndexOutOfBoundsException aoobx) { u = ((u < 0) ? 0 : ((u >= native_scale) ? (native_scale-1) : u)); v = ((v < 0) ? 0 : ((v >= native_scale) ? 
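/* clamp u and v into [0, native_scale-1], then retry the texture read once */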
(native_scale-1) : v)); try { rslt.setARGB(texture[v*native_scale + u]); } catch(ArrayIndexOutOfBoundsException oob2) { } } return; } /* See if not basic block texture */ int textop = textid / COLORMOD_MULT_INTERNAL; textid = textid % COLORMOD_MULT_INTERNAL; /* If clear-inside op, get out early */ if((textop == COLORMOD_CLEARINSIDE) || (textop == COLORMOD_MULTTONED_CLEARINSIDE)) { /* Check if previous block is same block type as we are: surface is transparent if it is */ if(blkid == lastblocktype) { rslt.setTransparent(); return; } /* If water block, to watercolor tone op */ if((blkid == 8) || (blkid == 9)) { textop = water_toned_op; } else if(textop == COLORMOD_MULTTONED_CLEARINSIDE) { textop = COLORMOD_MULTTONED; } } int[] texture = terrain_argb[textid]; /* Get texture coordinates (U=horizontal(left=0),V=vertical(top=0)) */ int u = 0, v = 0, tmp; if(patchid < 0) { int[] xyz = ps.getSubblockCoord(); switch(laststep) { case X_MINUS: /* South face: U = East (Z-), V = Down (Y-) */ u = native_scale-xyz[2]-1; v = native_scale-xyz[1]-1; break; case X_PLUS: /* North face: U = West (Z+), V = Down (Y-) */ u = xyz[2]; v = native_scale-xyz[1]-1; break; case Z_MINUS: /* West face: U = South (X+), V = Down (Y-) */ u = xyz[0]; v = native_scale-xyz[1]-1; break; case Z_PLUS: /* East face: U = North (X-), V = Down (Y-) */ u = native_scale-xyz[0]-1; v = native_scale-xyz[1]-1; break; case Y_MINUS: /* U = East(Z-), V = South(X+) */ if(stdrot) { u = xyz[0]; v = xyz[2]; } else { u = native_scale-xyz[2]-1; v = xyz[0]; } break; case Y_PLUS: if(stdrot) { u = native_scale-xyz[0]-1; v = xyz[2]; } else { u = xyz[2]; v = xyz[0]; } break; } } else { u = fastFloor(ps.getPatchU() * native_scale); v = native_scale - fastFloor(ps.getPatchV() * native_scale) - 1; } /* Handle U-V transorms before fetching color */ switch(textop) { case COLORMOD_ROT90: tmp = u; u = native_scale - v - 1; v = tmp; break; case COLORMOD_ROT180: u = native_scale - u - 1; v = native_scale - v - 1; break; case COLORMOD_ROT270: case COLORMOD_GRASSTONED270: case COLORMOD_FOLIAGETONED270: case COLORMOD_WATERTONED270: tmp = u; u = v; v = native_scale - tmp - 1; break; case COLORMOD_FLIPHORIZ: u = native_scale - u - 1; break; case COLORMOD_SHIFTDOWNHALF: if(v < native_scale/2) { rslt.setTransparent(); return; } v -= native_scale/2; break; case COLORMOD_SHIFTDOWNHALFANDFLIPHORIZ: if(v < native_scale/2) { rslt.setTransparent(); return; } v -= native_scale/2; u = native_scale - u - 1; break; case COLORMOD_INCLINEDTORCH: if(v >= (3*native_scale/4)) { rslt.setTransparent(); return; } v += native_scale/4; if(u < native_scale/2) u = native_scale/2-1; if(u > native_scale/2) u = native_scale/2; break; case COLORMOD_GRASSSIDE: boolean do_grass_side = false; boolean do_snow_side = false; if(ss.do_better_grass) { mapiter.unstepPosition(laststep); if(mapiter.getBlockTypeID() == BLOCKID_SNOW) do_snow_side = true; if(mapiter.getBlockTypeIDAt(BlockStep.Y_MINUS) == BLOCKID_GRASS) do_grass_side = true; mapiter.stepPosition(laststep); } /* Check if snow above block */ if(mapiter.getBlockTypeIDAt(BlockStep.Y_PLUS) == BLOCKID_SNOW) { if(do_snow_side) { texture = terrain_argb[TILEINDEX_SNOW]; /* Snow full side block */ textid = TILEINDEX_SNOW; } else { texture = terrain_argb[TILEINDEX_SNOWSIDE]; /* Snow block */ textid = TILEINDEX_SNOWSIDE; } textop = 0; } else { /* Else, check the grass color overlay */ if(do_grass_side) { texture = terrain_argb[TILEINDEX_GRASS]; /* Grass block */ textid = TILEINDEX_GRASS; textop = COLORMOD_GRASSTONED; /* Force grass toning */ } else { int 
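/* ovclr: pixel sampled from the grass-side overlay mask (TILEINDEX_GRASSMASK); an opaque pixel keeps the overlay texture and forces grass toning */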
ovclr = terrain_argb[TILEINDEX_GRASSMASK][v*native_scale+u]; if((ovclr & 0xFF000000) != 0) { /* Hit? */ texture = terrain_argb[TILEINDEX_GRASSMASK]; /* Use it */ textop = COLORMOD_GRASSTONED; /* Force grass toning */ } } } break; case COLORMOD_LILYTONED: /* Rotate texture based on lily orientation function (from renderBlockLilyPad in RenderBlocks.jara in MCP) */ long l1 = (long)(mapiter.getX() * 0x2fc20f) ^ (long)mapiter.getZ() * 0x6ebfff5L ^ (long)mapiter.getY(); l1 = l1 * l1 * 0x285b825L + l1 * 11L; int orientation = (int)(l1 >> 16 & 3L); switch(orientation) { case 0: tmp = u; u = native_scale - v - 1; v = tmp; break; case 1: u = native_scale - u - 1; v = native_scale - v - 1; break; case 2: tmp = u; u = v; v = native_scale - tmp - 1; break; case 3: break; } break; } /* Read color from texture */ try { rslt.setARGB(texture[v*native_scale + u]); } catch (ArrayIndexOutOfBoundsException aioobx) { rslt.setARGB(0); } int clrmult = -1; int clralpha = 0xFF000000; // If block has custom coloring if (hasblockcoloring) { Integer idx = (Integer) this.blockColoring.get(blkindex); LoadedImage img = imgs[idx]; if (img.argb != null) { clrmult = mapiter.getSmoothWaterColorMultiplier(img.argb); } else { hasblockcoloring = false; } } if (!hasblockcoloring) { // Switch based on texture modifier switch(textop) { case COLORMOD_GRASSTONED: case COLORMOD_GRASSTONED270: if(ss.do_biome_shading) { if(imgs[IMG_SWAMPGRASSCOLOR] != null) clrmult = mapiter.getSmoothColorMultiplier(imgs[IMG_GRASSCOLOR].argb, imgs[IMG_SWAMPGRASSCOLOR].argb); else clrmult = mapiter.getSmoothGrassColorMultiplier(imgs[IMG_GRASSCOLOR].argb); } else { clrmult = imgs[IMG_GRASSCOLOR].trivial_color; } break; case COLORMOD_FOLIAGETONED: case COLORMOD_FOLIAGETONED270: if(ss.do_biome_shading) { if(imgs[IMG_SWAMPFOLIAGECOLOR] != null) clrmult = mapiter.getSmoothColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb, imgs[IMG_SWAMPFOLIAGECOLOR].argb); else clrmult = mapiter.getSmoothFoliageColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb); } else { clrmult = imgs[IMG_FOLIAGECOLOR].trivial_color; } break; case COLORMOD_FOLIAGEMULTTONED: if(ss.do_biome_shading) { if(imgs[IMG_SWAMPFOLIAGECOLOR] != null) clrmult = mapiter.getSmoothColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb, imgs[IMG_SWAMPFOLIAGECOLOR].argb); else clrmult = mapiter.getSmoothFoliageColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb); } else { clrmult = imgs[IMG_FOLIAGECOLOR].trivial_color; } if(map.custColorMult != null) { clrmult = ((clrmult & 0xFEFEFE) + map.custColorMult.getColorMultiplier(mapiter)) / 2; } else { clrmult = ((clrmult & 0xFEFEFE) + map.colorMult) / 2; } break; case COLORMOD_WATERTONED: case COLORMOD_WATERTONED270: if(imgs[IMG_WATERCOLORX] != null) { if(ss.do_biome_shading) { clrmult = mapiter.getSmoothWaterColorMultiplier(imgs[IMG_WATERCOLORX].argb); } else { clrmult = imgs[IMG_WATERCOLORX].trivial_color; } } else { if(ss.do_biome_shading) clrmult = mapiter.getSmoothWaterColorMultiplier(); } break; case COLORMOD_BIRCHTONED: clrmult = 0x80a755; /* From ColorizerFoliage.java in MCP */ break; case COLORMOD_PINETONED: clrmult = 0x619961; /* From ColorizerFoliage.java in MCP */ break; case COLORMOD_LILYTONED: clrmult = 0x208030; /* from BlockLilyPad.java in MCP */ break; case COLORMOD_MULTTONED: /* Use color multiplier */ if(map.custColorMult != null) { clrmult = map.custColorMult.getColorMultiplier(mapiter); } else { clrmult = map.colorMult; } if((clrmult & 0xFF000000) != 0) { clralpha = clrmult; } break; } } if((clrmult != -1) && (clrmult != 0)) { rslt.blendColor(clrmult | clralpha); } } 
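/*
 * Editor's sketch (hedged addition, not part of Dynmap's API; the helper names below are
 * invented for illustration): the texture ids stored in HDTextureMap.faces pack a
 * color-modifier code together with a tile index. Definition files carry the modifier at
 * x1000 (COLORMOD_MULT_FILE); after parseTextureIndex() it is carried at x1000000
 * (COLORMOD_MULT_INTERNAL), which is exactly what readColor() strips off above. For example,
 * 17000123 decodes to modifier code 17 applied to global tile 123.
 */
private static int exampleModifierOf(int textid) {
    return textid / COLORMOD_MULT_INTERNAL;   /* 17000123 -> 17 */
}
private static int exampleTileOf(int textid) {
    return textid % COLORMOD_MULT_INTERNAL;   /* 17000123 -> 123 */
}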
private static final void makeAlphaPure(int[] argb) { for(int i = 0; i < argb.length; i++) { if((argb[i] & 0xFF000000) != 0) argb[i] |= 0xFF000000; } } private static final int fastFloor(double f) { return ((int)(f + 1000000000.0)) - 1000000000; } /** * Get tile index, based on tile file name and relative index within tile file * @param fname - filename * @param idx - tile index (= (y * xdim) + x) * @return global tile index, or -1 if not found */ public static int findDynamicTile(String fname, int idx) { DynamicTileFile f; /* Find existing, if already there */ f = addonfilesbyname.get(fname); if (f != null) { if ((idx >= 0) && (idx < f.tile_to_dyntile.length) && (f.tile_to_dyntile[idx] >= 0)) { return f.tile_to_dyntile[idx]; } } return -1; } /** * Add a new dynamic file definition, or return existing * * @param fname - tile file name (path within texture pack) * @param xdim - tile count across (horizontal) * @param ydim - tile count down (vertical) * @param fmt - tile file format * @param args - attribute tokens from the defining line (consulted for CUSTOM format) * @return dynamic file index */ public static int findOrAddDynamicTileFile(String fname, int xdim, int ydim, TileFileFormat fmt, String[] args) { DynamicTileFile f; /* Find existing, if already there */ f = addonfilesbyname.get(fname); if (f != null) { return f.idx; } /* Add new tile file entry */ f = new DynamicTileFile(); f.filename = fname; f.tilecnt_x = xdim; f.tilecnt_y = ydim; f.format = fmt; switch(fmt) { case GRID: f.tile_to_dyntile = new int[xdim*ydim]; break; case CHEST: f.tile_to_dyntile = new int[TILEINDEX_CHEST_COUNT]; /* 6 images for chest tile */ break; case BIGCHEST: f.tile_to_dyntile = new int[TILEINDEX_BIGCHEST_COUNT]; /* 10 images for big chest tile */ break; case SIGN: f.tile_to_dyntile = new int[TILEINDEX_SIGN_COUNT]; /* 10 images for sign tile */ break; case CUSTOM: { List<CustomTileRec> recs = new ArrayList<CustomTileRec>(); for(String a : args) { String[] v = a.split("="); if(v.length != 2) continue; if(v[0].startsWith("tile")) { int id = 0; try { id = Integer.parseInt(v[0].substring(4)); } catch (NumberFormatException nfx) { Log.warning("Bad tile ID: " + v[0]); continue; } while(recs.size() <= id) { recs.add(null); } CustomTileRec rec = new CustomTileRec(); try { String[] coords = v[1].split("/"); String[] topleft = coords[0].split(":"); rec.srcx = Integer.parseInt(topleft[0]); rec.srcy = Integer.parseInt(topleft[1]); String[] size = coords[1].split(":"); rec.width = Integer.parseInt(size[0]); rec.height = Integer.parseInt(size[1]); if(coords.length >= 3) { String[] dest = coords[2].split(":"); rec.targetx = Integer.parseInt(dest[0]); rec.targety = Integer.parseInt(dest[1]); } recs.set(id, rec); } catch (Exception x) { Log.warning("Bad custom tile coordinate: " + v[1]); } } } f.tile_to_dyntile = new int[recs.size()]; f.cust = recs; } break; case SKIN: f.tile_to_dyntile = new int[TILEINDEX_SKIN_COUNT]; /* 6 images for skin tile */ break; case TILESET: f.tile_to_dyntile = new int[xdim*ydim]; break; default: f.tile_to_dyntile = new int[xdim*ydim]; break; } Arrays.fill(f.tile_to_dyntile, -1); f.idx = addonfiles.size(); addonfiles.add(f); addonfilesbyname.put(f.filename, f); //Log.info("File " + fname + "(" + f.idx + ")=" + fmt.toString()); return f.idx; } /** * Add or find dynamic tile index of given dynamic tile * @param dynfile_idx - index of file * @param tile_id - ID of tile within file * @return global tile ID */ public static int findOrAddDynamicTile(int dynfile_idx, int tile_id) { DynamicTileFile f = addonfiles.get(dynfile_idx); if(f == null) { Log.warning("Invalid add-on file index: " + dynfile_idx); return 0; } if(f.tile_to_dyntile[tile_id] < 0) { /* Not assigned yet? 
*/ f.tile_to_dyntile[tile_id] = next_dynamic_tile; next_dynamic_tile++; /* Allocate next ID */ } return f.tile_to_dyntile[tile_id]; } private static final int[] smooth_water_mult = new int[10]; public static int getTextureIDAt(MapIterator mapiter, int blkdata, int blkmeta, BlockStep face) { HDTextureMap map = HDTextureMap.getMap(blkdata, blkmeta, blkmeta); int idx = -1; if (map != null) { int sideidx = face.ordinal(); if (map.faces != null) { if (sideidx < map.faces.length) idx = map.faces[sideidx]; else idx = map.faces[0]; } } if(idx > 0) idx = idx % COLORMOD_MULT_INTERNAL; return idx; } private static final String PALETTE_BLOCK_KEY = "palette.block."; private void processCustomColorMap(String fname, String ids) { // Register file name int idx = findOrAddDynamicTileFile(fname, 1, 1, TileFileFormat.BIOME, new String[0]); if(idx < 0) { Log.info("Error registering custom color file: " + fname); return; } Integer index = idx + IMG_CNT; // Now, parse block ID list for (String id : ids.split("\\s+")) { String[] tok = id.split(":"); int meta = -1; int blkid = -1; if (tok.length == 1) { /* Only ID */ try { blkid = Integer.parseInt(tok[0]); } catch (NumberFormatException nfx) { Log.info("Bad custom color block ID: " + tok[0]); } } else if (tok.length == 2) { /* ID : meta */ try { blkid = Integer.parseInt(tok[0]); } catch (NumberFormatException nfx) { Log.info("Bad custom color block ID: " + tok[0]); } try { meta = Integer.parseInt(tok[1]); } catch (NumberFormatException nfx) { Log.info("Bad custom color meta ID: " + tok[1]); } } /* Add mappings for values */ if ((blkid > 0) && (blkid < 4096)) { if ((meta >= 0) && (meta < 16)) { int idm = indexByIDMeta(blkid, meta); this.hasBlockColoring.set(idm); this.blockColoring.put(idm, index); } else if (meta == -1) { /* All meta IDs */ for (meta = 0; meta < 16; meta++) { int idm = indexByIDMeta(blkid, meta); this.hasBlockColoring.set(idm); this.blockColoring.put(idm, index); } } } } } private void processCustomColors(Properties p) { // Loop through keys for(String pname : p.stringPropertyNames()) { if(!pname.startsWith(PALETTE_BLOCK_KEY)) continue; String v = p.getProperty(pname); String fname = pname.substring(PALETTE_BLOCK_KEY.length()).trim(); // Get filename of color map if(fname.charAt(0) == '/') fname = fname.substring(1); // Strip leading / processCustomColorMap(fname, v); } } private static final int indexByIDMeta(int blkid, int meta) { return ((blkid << 4) | meta); } static { /* * Generate smoothed swamp multipliers (indexed by swamp biome count) */ Color c = new Color(); for(int i = 0; i < 10; i++) { /* Use water color multiplier base for 1.1 (E0FFAE) */ int r = (((9-i) * 0xFF) + (i * 0xE0)) / 9; int g = 0xFF; int b = (((9-i) * 0xFF) + (i * 0xAE)) / 9; c.setRGBA(r & 0xFE, g & 0xFE, b & 0xFE, 0xFF); smooth_water_mult[i] = c.getARGB(); } } }
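/*
 * Worked example (editor's note) for the smooth_water_mult table initialized in the static
 * block above: the loop linearly interpolates from white toward the 1.1 water tone 0xE0FFAE
 * across 10 entries, indexed by how many sampled neighbor biomes are swamp. At i = 9,
 * r = (0*0xFF + 9*0xE0)/9 = 0xE0 and b = (0*0xFF + 9*0xAE)/9 = 0xAE, giving the full tone;
 * at i = 0 the multiplier is near-white 0xFEFEFE (the & 0xFE masks keep each channel even).
 */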
src/main/java/org/dynmap/hdmap/TexturePack.java
package org.dynmap.hdmap; import java.awt.image.BufferedImage; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.imageio.ImageIO; import org.dynmap.Color; import org.dynmap.ConfigurationNode; import org.dynmap.DynmapCore; import org.dynmap.Log; import org.dynmap.MapManager; import org.dynmap.common.BiomeMap; import org.dynmap.renderer.CustomColorMultiplier; import org.dynmap.utils.BlockStep; import org.dynmap.utils.DynIntHashMap; import org.dynmap.utils.ForgeConfigFile; import org.dynmap.utils.MapIterator; /** * Loader and processor class for Minecraft texture packs * Texture packs are found in the dynmap/texturepacks directory, and are either ZIP files * or directories whose content matches the structure of a zipped texture pack: * ./terrain.png - main color data (required) * misc/grasscolor.png - tone for grass color, biome sensitive (required) * misc/foliagecolor.png - tone for leaf color, biome sensitive (required) * custom_lava_still.png - custom still lava animation (optional) * custom_lava_flowing.png - custom flowing lava animation (optional) * custom_water_still.png - custom still water animation (optional) * custom_water_flowing.png - custom flowing water animation (optional) * misc/watercolorX.png - custom water color multiplier (optional) * misc/swampgrasscolor.png - tone for grass color in swamps (optional) * misc/swampfoliagecolor.png - tone for leaf color in swamps (optional) */ public class TexturePack { /* Loaded texture packs */ private static HashMap<String, TexturePack> packs = new HashMap<String, TexturePack>(); private static Object packlock = new Object(); private static final String TERRAIN_PNG = "terrain.png"; private static final String GRASSCOLOR_PNG = "misc/grasscolor.png"; private static final String FOLIAGECOLOR_PNG = "misc/foliagecolor.png"; private static final String WATERCOLORX_PNG = "misc/watercolorX.png"; private static final String CUSTOMLAVASTILL_PNG = "custom_lava_still.png"; private static final String CUSTOMLAVAFLOWING_PNG = "custom_lava_flowing.png"; private static final String CUSTOMWATERSTILL_PNG = "custom_water_still.png"; private static final String CUSTOMWATERFLOWING_PNG = "custom_water_flowing.png"; private static final String SWAMPGRASSCOLOR_PNG = "misc/swampgrasscolor.png"; private static final String SWAMPFOLIAGECOLOR_PNG = "misc/swampfoliagecolor.png"; private static final String STANDARDTP = "standard"; /* Color modifier codes (x1000 for value in definition file, x1000000 for internal value) */ //private static final int COLORMOD_NONE = 0; private static final int COLORMOD_GRASSTONED = 1; private static final int COLORMOD_FOLIAGETONED = 2; private static final int COLORMOD_WATERTONED = 3; private static final int COLORMOD_ROT90 = 4; private static final int COLORMOD_ROT180 = 5; private static final int COLORMOD_ROT270 = 6; private static final int COLORMOD_FLIPHORIZ = 7; private static final int COLORMOD_SHIFTDOWNHALF = 8; private static final int COLORMOD_SHIFTDOWNHALFANDFLIPHORIZ = 9; private static final int COLORMOD_INCLINEDTORCH = 10; private static final int COLORMOD_GRASSSIDE = 11; 
private static final int COLORMOD_CLEARINSIDE = 12; private static final int COLORMOD_PINETONED = 13; private static final int COLORMOD_BIRCHTONED = 14; private static final int COLORMOD_LILYTONED = 15; //private static final int COLORMOD_OLD_WATERSHADED = 16; private static final int COLORMOD_MULTTONED = 17; /* Toned with colorMult or custColorMult - not biome-style */ private static final int COLORMOD_GRASSTONED270 = 18; // GRASSTONED + ROT270 private static final int COLORMOD_FOLIAGETONED270 = 19; // FOLIAGETONED + ROT270 private static final int COLORMOD_WATERTONED270 = 20; // WATERTONED + ROT270 private static final int COLORMOD_MULTTONED_CLEARINSIDE = 21; // MULTTONED + CLEARINSIDE private static final int COLORMOD_FOLIAGEMULTTONED = 22; // FOLIAGETONED + colorMult or custColorMult private static final int COLORMOD_MULT_FILE = 1000; private static final int COLORMOD_MULT_INTERNAL = 1000000; /* Special tile index values */ private static final int TILEINDEX_BLANK = -1; private static final int TILEINDEX_GRASS = 0; private static final int TILEINDEX_GRASSMASK = 38; private static final int TILEINDEX_SNOW = 66; private static final int TILEINDEX_SNOWSIDE = 68; private static final int TILEINDEX_PISTONSIDE = 108; private static final int TILEINDEX_GLASSPANETOP = 148; private static final int TILEINDEX_AIRFRAME = 158; private static final int TILEINDEX_REDSTONE_NSEW_TONE = 164; private static final int TILEINDEX_REDSTONE_EW_TONE = 165; private static final int TILEINDEX_EYEOFENDER = 174; private static final int TILEINDEX_REDSTONE_NSEW = 180; private static final int TILEINDEX_REDSTONE_EW = 181; private static final int TILEINDEX_STATIONARYWATER = 257; private static final int TILEINDEX_MOVINGWATER = 258; private static final int TILEINDEX_STATIONARYLAVA = 259; private static final int TILEINDEX_MOVINGLAVA = 260; private static final int TILEINDEX_PISTONEXTSIDE = 261; private static final int TILEINDEX_PISTONSIDE_EXT = 262; private static final int TILEINDEX_PANETOP_X = 263; private static final int TILEINDEX_AIRFRAME_EYE = 264; private static final int TILEINDEX_FIRE = 265; private static final int TILEINDEX_PORTAL = 266; private static final int MAX_TILEINDEX = 266; /* Index of last static tile definition */ private static final int TILETABLE_LEN = 5000; /* Leave room for dynmaic tiles */ /* Indexes of faces in a CHEST format tile file */ private static final int TILEINDEX_CHEST_TOP = 0; private static final int TILEINDEX_CHEST_LEFT = 1; private static final int TILEINDEX_CHEST_RIGHT = 2; private static final int TILEINDEX_CHEST_FRONT = 3; private static final int TILEINDEX_CHEST_BACK = 4; private static final int TILEINDEX_CHEST_BOTTOM = 5; private static final int TILEINDEX_CHEST_COUNT = 6; /* Indexes of faces in a BIGCHEST format tile file */ private static final int TILEINDEX_BIGCHEST_TOPLEFT = 0; private static final int TILEINDEX_BIGCHEST_TOPRIGHT = 1; private static final int TILEINDEX_BIGCHEST_FRONTLEFT = 2; private static final int TILEINDEX_BIGCHEST_FRONTRIGHT = 3; private static final int TILEINDEX_BIGCHEST_LEFT = 4; private static final int TILEINDEX_BIGCHEST_RIGHT = 5; private static final int TILEINDEX_BIGCHEST_BACKLEFT = 6; private static final int TILEINDEX_BIGCHEST_BACKRIGHT = 7; private static final int TILEINDEX_BIGCHEST_BOTTOMLEFT = 8; private static final int TILEINDEX_BIGCHEST_BOTTOMRIGHT = 9; private static final int TILEINDEX_BIGCHEST_COUNT = 10; /* Indexes of faces in the SIGN format tile file */ private static final int TILEINDEX_SIGN_FRONT = 0; private static 
final int TILEINDEX_SIGN_BACK = 1; private static final int TILEINDEX_SIGN_TOP = 2; private static final int TILEINDEX_SIGN_BOTTOM = 3; private static final int TILEINDEX_SIGN_LEFTSIDE = 4; private static final int TILEINDEX_SIGN_RIGHTSIDE = 5; private static final int TILEINDEX_SIGN_POSTFRONT = 6; private static final int TILEINDEX_SIGN_POSTBACK = 7; private static final int TILEINDEX_SIGN_POSTLEFT = 8; private static final int TILEINDEX_SIGN_POSTRIGHT = 9; private static final int TILEINDEX_SIGN_COUNT = 10; /* Indexes of faces in the SKIN format tile file */ private static final int TILEINDEX_SKIN_FACEFRONT = 0; private static final int TILEINDEX_SKIN_FACELEFT = 1; private static final int TILEINDEX_SKIN_FACERIGHT = 2; private static final int TILEINDEX_SKIN_FACEBACK = 3; private static final int TILEINDEX_SKIN_FACETOP = 4; private static final int TILEINDEX_SKIN_FACEBOTTOM = 5; private static final int TILEINDEX_SKIN_COUNT = 6; private static final int BLOCKTABLELEN = 256; /* Enough for normal block IDs */ public static enum TileFileFormat { GRID, CHEST, BIGCHEST, SIGN, SKIN, CUSTOM, TILESET, BIOME }; /* Map of 1.5 texture files to 0-255 texture indices */ private static final String[] terrain_map = { "grass_top", "stone", "dirt", "grass_side", "wood", "stoneslab_side", "stoneslab_top", "brick", "tnt_side", "tnt_top", "tnt_bottom", "web", "rose", "flower", "portal", "sapling", "stonebrick", "bedrock", "sand", "gravel", "tree_side", "tree_top", "blockIron", "blockGold", "blockDiamond", "blockEmerald", null, null, "mushroom_red", "mushroom_brown", "sapling_jungle", null, "oreGold", "oreIron", "oreCoal", "bookshelf", "stoneMoss", "obsidian", "grass_side_overlay", "tallgrass", null, "beacon", null, "workbench_top", "furnace_front", "furnace_side", "dispenser_front", null, "sponge", "glass", "oreDiamond", "oreRedstone", "leaves", "leaves_opaque", "stonebricksmooth", "deadbush", "fern", null, null, "workbench_side", "workbench_front", "furnace_front_lit", "furnace_top", "sapling_spruce", "cloth_0", "mobSpawner", "snow", "ice", "snow_side", "cactus_top", "cactus_side", "cactus_bottom", "clay", "reeds", "musicBlock", "jukebox_top", "waterlily", "mycel_side", "mycel_top", "sapling_birch", "torch", "doorWood_upper", "doorIron_upper", "ladder", "trapdoor", "fenceIron", "farmland_wet", "farmland_dry", "crops_0", "crops_1", "crops_2", "crops_3", "crops_4", "crops_5", "crops_6", "crops_7", "lever", "doorWood_lower", "doorIron_lower", "redtorch_lit", "stonebricksmooth_mossy", "stonebricksmooth_cracked", "pumpkin_top", "hellrock", "hellsand", "lightgem", "piston_top_sticky", "piston_top", "piston_side", "piston_bottom", "piston_inner_top", "stem_straight", "rail_turn", "cloth_15", "cloth_7", "redtorch", "tree_spruce", "tree_birch", "pumpkin_side", "pumpkin_face", "pumpkin_jack", "cake_top", "cake_side", "cake_inner", "cake_bottom", "mushroom_skin_red", "mushroom_skin_brown", "stem_bent", "rail", "cloth_14", "cloth_6", "repeater", "leaves_spruce", "leaves_spruce_opaque", "bed_feet_top", "bed_head_top", "melon_side", "melon_top", "cauldron_top", "cauldron_inner", null, "mushroom_skin_stem", "mushroom_inside", "vine", "blockLapis", "cloth_13", "cloth_5", "repeater_lit", "thinglass_top", "bed_feet_end", "bed_feet_side", "bed_head_side", "bed_head_end", "tree_jungle", "cauldron_side", "cauldron_bottom", "brewingStand_base", "brewingStand", "endframe_top", "endframe_side", "oreLapis", "cloth_12", "cloth_4", "goldenRail", "redstoneDust_cross", "redstoneDust_line", "enchantment_top", "dragonEgg", "cocoa_2", 
"cocoa_1", "cocoa_0", "oreEmerald", "tripWireSource", "tripWire", "endframe_eye", "whiteStone", "sandstone_top", "cloth_11", "cloth_3", "goldenRail_powered", "redstoneDust_cross_overlay", "redstoneDust_line_overlay", "enchantment_side", "enchantment_bottom", "commandBlock", "itemframe_back", "flowerPot", null, null, null, null, null, "sandstone_side", "cloth_10", "cloth_2", "detectorRail", "leaves_jungle", "leaves_jungle_opaque", "wood_spruce", "wood_jungle", "carrots_0", "carrots_1", "carrots_2", "carrots_3", "potatoes_3", null, null, null, "sandstone_bottom", "cloth_9", "cloth_1", "redstoneLight", "redstoneLight_lit", "stonebricksmooth_carved", "wood_birch", "anvil_base", "anvil_top_damaged_1", null, null, null, null, null, null, null, "netherBrick", "cloth_8", "netherStalk_0", "netherStalk_1", "netherStalk_2", "sandstone_carved", "sandstone_smooth", "anvil_top", "anvil_top_damaged_2", null, null, null, null, null, null, null, "destroy_0", "destroy_1", "destroy_2", "destroy_3", "destroy_4", "destroy_5", "destroy_6", "destroy_7", "destroy_8", "destroy_9", null, null, null, null, null, null, /* Extra 1.5-based textures: starting at 256 (corresponds to TILEINDEX_ values) */ null, "water", "water_flow", "lava", "lava_flow", null, null, null, null, "fire_0", "portal" }; private static class CustomTileRec { int srcx, srcy, width, height, targetx, targety; } private static int next_dynamic_tile = MAX_TILEINDEX+1; private static class DynamicTileFile { int idx; /* Index of tile in addonfiles */ String filename; int tilecnt_x, tilecnt_y; /* Number of tiles horizontally and vertically */ int tile_to_dyntile[]; /* Mapping from tile index in tile file to dynamic ID in global tile table (terrain_argb): 0=unassigned */ TileFileFormat format; List<CustomTileRec> cust; String[] tilenames; /* For TILESET, array of tilenames, indexed by tile index */ String setdir; /* For TILESET, directory of tile set in texture */ } private static ArrayList<DynamicTileFile> addonfiles = new ArrayList<DynamicTileFile>(); private static Map<String, DynamicTileFile> addonfilesbyname = new HashMap<String, DynamicTileFile>(); private static String getBlockFileName(int idx) { if ((idx >= 0) && (idx < terrain_map.length) && (terrain_map[idx] != null)) { return "textures/blocks/" + terrain_map[idx] + ".png"; } return null; } /* Reset add-on tile data */ private static void resetFiles() { synchronized(packlock) { packs.clear(); } addonfiles.clear(); addonfilesbyname.clear(); next_dynamic_tile = MAX_TILEINDEX+1; /* Now, load entries for vanilla v1.5 files */ for(int i = 0; i < terrain_map.length; i++) { String fn = getBlockFileName(i); if (fn != null) { int idx = findOrAddDynamicTileFile(fn, 1, 1, TileFileFormat.GRID, new String[0]); DynamicTileFile dtf = addonfiles.get(idx); if (dtf != null) { // Fix mapping of tile ID to global table index dtf.tile_to_dyntile[0] = i; } } } } private static class LoadedImage { int[] argb; int width, height; int trivial_color; } private int[][] terrain_argb; private int native_scale; private CTMTexturePack ctm; private BitSet hasBlockColoring = new BitSet(); // Quick lookup - (blockID << 4) + blockMeta - set if custom colorizer private DynIntHashMap blockColoring = new DynIntHashMap(); // Map - index by (blockID << 4) + blockMeta - Index of image for color map private int water_toned_op = COLORMOD_WATERTONED; private static final int IMG_GRASSCOLOR = 0; private static final int IMG_FOLIAGECOLOR = 1; private static final int IMG_CUSTOMWATERMOVING = 2; private static final int IMG_CUSTOMWATERSTILL 
= 3; private static final int IMG_CUSTOMLAVAMOVING = 4; private static final int IMG_CUSTOMLAVASTILL = 5; private static final int IMG_WATERCOLORX = 6; private static final int IMG_SWAMPGRASSCOLOR = 7; private static final int IMG_SWAMPFOLIAGECOLOR = 8; private static final int IMG_CNT = 9; /* 0-(IMG_CNT-1) are fixed, IMG_CNT+x is dynamic file x */ private LoadedImage[] imgs; private HashMap<Integer, TexturePack> scaled_textures; private Object scaledlock = new Object(); public enum BlockTransparency { OPAQUE, /* Block is opaque - blocks light - lit by light from adjacent blocks */ TRANSPARENT, /* Block is transparent - passes light - lit by light level in own block */ SEMITRANSPARENT, /* Opaque block that doesn't block all rays (steps, slabs) - use light above for face lighting on opaque blocks */ LEAVES /* Special case of transparent, to work around lighting errors in SpoutPlugin */ } public static class HDTextureMap { private int faces[]; /* index in terrain.png of image for each face (indexed by BlockStep.ordinal() OR patch index) */ private byte[] layers; /* If layered, each index corresponds to faces index, and value is index of next layer */ private List<Integer> blockids; private int databits; private BlockTransparency bt; private boolean userender; private String blockset; private int colorMult; private CustomColorMultiplier custColorMult; private boolean stdrotate; // Marked for corrected to proper : stdrot=true private static HDTextureMap[] texmaps; private static BlockTransparency transp[]; private static boolean userenderdata[]; private static HDTextureMap blank; private static void resizeTable(int idx) { int cnt = idx+1; /* Copy texture maps */ HDTextureMap[] newtexmaps = new HDTextureMap[cnt*16]; System.arraycopy(texmaps, 0, newtexmaps, 0, texmaps.length); Arrays.fill(newtexmaps, texmaps.length, newtexmaps.length, blank); texmaps = newtexmaps; /* Copy transparency */ BlockTransparency[] newtrans = new BlockTransparency[cnt]; System.arraycopy(transp, 0, newtrans, 0, transp.length); Arrays.fill(newtrans, transp.length, cnt, BlockTransparency.OPAQUE); transp = newtrans; /* Copy use-render-data */ boolean[] newurd = new boolean[cnt]; System.arraycopy(userenderdata, 0, newurd, 0, userenderdata.length); Arrays.fill(newurd, userenderdata.length, cnt, false); userenderdata = newurd; } private static void initializeTable() { texmaps = new HDTextureMap[16*BLOCKTABLELEN]; transp = new BlockTransparency[BLOCKTABLELEN]; userenderdata = new boolean[BLOCKTABLELEN]; blank = new HDTextureMap(); for(int i = 0; i < texmaps.length; i++) texmaps[i] = blank; for(int i = 0; i < transp.length; i++) transp[i] = BlockTransparency.OPAQUE; } private HDTextureMap() { blockids = Collections.singletonList(Integer.valueOf(0)); databits = 0xFFFF; userender = false; blockset = null; colorMult = 0; custColorMult = null; faces = new int[] { TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK }; layers = null; stdrotate = true; } public HDTextureMap(List<Integer> blockids, int databits, int[] faces, byte[] layers, BlockTransparency trans, boolean userender, int colorMult, CustomColorMultiplier custColorMult, String blockset, boolean stdrot) { this.faces = faces; this.layers = layers; this.blockids = blockids; this.databits = databits; this.bt = trans; this.colorMult = colorMult; this.custColorMult = custColorMult; this.userender = userender; this.blockset = blockset; this.stdrotate = stdrot; } public void addToTable() { /* Add entries to lookup table */ 
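/* Layout note: texmaps is indexed by (blockID << 4) | dataValue, one slot per id:data combination; transparency and use-render-data flags are tracked per block id only */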
for(Integer blkid : blockids) { if(blkid >= transp.length) resizeTable(blkid); if(blkid > 0) { for(int i = 0; i < 16; i++) { if((databits & (1 << i)) != 0) { int idx = 16*blkid + i; if((this.blockset != null) && (this.blockset.equals("core") == false)) { HDBlockModels.resetIfNotBlockSet(blkid, i, this.blockset); } texmaps[idx] = this; } } transp[blkid] = bt; /* Transparency is only blocktype based right now */ userenderdata[blkid] = userender; /* Ditto for using render data */ } } } public static HDTextureMap getMap(int blkid, int blkdata, int blkrenderdata) { try { if(userenderdata[blkid]) return texmaps[(blkid<<4) + blkrenderdata]; else return texmaps[(blkid<<4) + blkdata]; } catch (Exception x) { resizeTable(blkid); return blank; } } public static BlockTransparency getTransparency(int blkid) { try { return transp[blkid]; } catch (Exception x) { resizeTable(blkid); return BlockTransparency.OPAQUE; } } private static void remapTexture(int id, int srcid) { for(int i = 0; i < 16; i++) { texmaps[(id<<4)+i] = texmaps[(srcid<<4)+i]; } } } /** * Texture map - used for accumulation of textures from different sources, keyed by lookup value */ public static class TextureMap { private Map<Integer, Integer> key_to_index = new HashMap<Integer, Integer>(); private List<Integer> texture_ids = new ArrayList<Integer>(); private List<Integer> blockids = new ArrayList<Integer>(); private int databits = 0; private BlockTransparency trans = BlockTransparency.OPAQUE; private boolean userender = false; private int colorMult = 0; private CustomColorMultiplier custColorMult = null; private String blockset; public int addTextureByKey(int key, int textureid) { int off = texture_ids.size(); /* Next index in array is texture index */ texture_ids.add(textureid); /* Add texture ID to list */ key_to_index.put(key, off); /* Add texture index to lookup by key */ return off; } } private static HashMap<String, TextureMap> textmap_by_id = new HashMap<String, TextureMap>(); /** * Add texture to texture map */ private static int addTextureByKey(String id, int key, int textureid) { TextureMap idx = textmap_by_id.get(id); if(idx == null) { /* Add empty one, if not found */ idx = new TextureMap(); textmap_by_id.put(id, idx); } return idx.addTextureByKey(key, textureid); } /** * Add settings for texture map */ private static void addTextureIndex(String id, List<Integer> blockids, int databits, BlockTransparency trans, boolean userender, int colorMult, CustomColorMultiplier custColorMult, String blockset) { TextureMap idx = textmap_by_id.get(id); if(idx == null) { /* Add empty one, if not found */ idx = new TextureMap(); textmap_by_id.put(id, idx); } idx.blockids = blockids; idx.databits = databits; idx.trans = trans; idx.userender = userender; idx.colorMult = colorMult; idx.custColorMult = custColorMult; } /** * Finish processing of texture indexes - add to texture maps */ private static void processTextureMaps() { for(TextureMap ti : textmap_by_id.values()) { if(ti.blockids.isEmpty()) continue; int[] txtids = new int[ti.texture_ids.size()]; for(int i = 0; i < txtids.length; i++) { txtids[i] = ti.texture_ids.get(i).intValue(); } HDTextureMap map = new HDTextureMap(ti.blockids, ti.databits, txtids, null, ti.trans, ti.userender, ti.colorMult, ti.custColorMult, ti.blockset, true); map.addToTable(); } } /** * Get index of texture in texture map */ public static int getTextureIndexFromTextureMap(String id, int key) { int idx = -1; TextureMap map = textmap_by_id.get(id); if(map != null) { Integer txtidx = map.key_to_index.get(key); 
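/* a null lookup means this key was never registered via addTextureByKey() */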
if(txtidx != null) { idx = txtidx.intValue(); } } return idx; } /* * Get count of textures in given texture map */ public static int getTextureMapLength(String id) { TextureMap map = textmap_by_id.get(id); if(map != null) { return map.texture_ids.size(); } return -1; } /** Get or load texture pack */ public static TexturePack getTexturePack(DynmapCore core, String tpname) { synchronized(packlock) { TexturePack tp = packs.get(tpname); if(tp != null) return tp; try { tp = new TexturePack(core, tpname); /* Attempt to load pack */ packs.put(tpname, tp); return tp; } catch (FileNotFoundException fnfx) { Log.severe("Error loading texture pack '" + tpname + "' - not found"); } return null; } } /** * Constructor for texture pack, by name */ private TexturePack(DynmapCore core, String tpname) throws FileNotFoundException { File texturedir = getTexturePackDirectory(core); /* Set up for enough files */ imgs = new LoadedImage[IMG_CNT + addonfiles.size()]; // Get texture pack File f = new File(texturedir, tpname); // Build loader TexturePackLoader tpl = new TexturePackLoader(f); InputStream is = null; try { /* Load CTM support, if enabled */ if(core.isCTMSupportEnabled()) { ctm = new CTMTexturePack(tpl, this, core); if(ctm.isValid() == false) { ctm = null; } } /* Load custom colors support, if enabled */ if(core.isCustomColorsSupportEnabled()) { is = tpl.openTPResource("color.properties"); Properties p; if (is != null) { p = new Properties(); try { p.load(is); } finally { tpl.closeResource(is); } processCustomColors(p); } } /* Loop through dynamic files */ for(int i = 0; i < addonfiles.size(); i++) { DynamicTileFile dtf = addonfiles.get(i); is = tpl.openTPResource(dtf.filename); try { if(dtf.format == TileFileFormat.BIOME) loadBiomeShadingImage(is, i+IMG_CNT); /* Load image file */ else loadImage(is, i+IMG_CNT); /* Load image file */ } finally { tpl.closeResource(is); } } /* Find and load terrain.png */ is = tpl.openTPResource(TERRAIN_PNG); /* Try to find terrain.png */ if (is != null) { loadTerrainPNG(is); tpl.closeResource(is); } /* Try to find and load misc/grasscolor.png */ is = tpl.openTPResource(GRASSCOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_GRASSCOLOR); tpl.closeResource(is); } /* Try to find and load misc/foliagecolor.png */ is = tpl.openTPResource(FOLIAGECOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_FOLIAGECOLOR); tpl.closeResource(is); } /* Try to find and load misc/swampgrasscolor.png */ is = tpl.openTPResource(SWAMPGRASSCOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_SWAMPGRASSCOLOR); tpl.closeResource(is); } /* Try to find and load misc/swampfoliagecolor.png */ is = tpl.openTPResource(SWAMPFOLIAGECOLOR_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_SWAMPFOLIAGECOLOR); tpl.closeResource(is); } /* Try to find and load misc/watercolor.png */ is = tpl.openTPResource(WATERCOLORX_PNG); if (is != null) { loadBiomeShadingImage(is, IMG_WATERCOLORX); tpl.closeResource(is); } /* Optional files - process if they exist */ is = tpl.openTPResource(CUSTOMLAVASTILL_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMLAVASTILL_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMLAVASTILL); tpl.closeResource(is); patchTextureWithImage(IMG_CUSTOMLAVASTILL, TILEINDEX_STATIONARYLAVA); patchTextureWithImage(IMG_CUSTOMLAVASTILL, TILEINDEX_MOVINGLAVA); } is = tpl.openTPResource(CUSTOMLAVAFLOWING_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMLAVAFLOWING_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMLAVAMOVING); tpl.closeResource(is); 
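/* overlay the loaded custom animation onto the moving-lava tile slot */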
patchTextureWithImage(IMG_CUSTOMLAVAMOVING, TILEINDEX_MOVINGLAVA); } is = tpl.openTPResource(CUSTOMWATERSTILL_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMWATERSTILL_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMWATERSTILL); tpl.closeResource(is); patchTextureWithImage(IMG_CUSTOMWATERSTILL, TILEINDEX_STATIONARYWATER); patchTextureWithImage(IMG_CUSTOMWATERSTILL, TILEINDEX_MOVINGWATER); } is = tpl.openTPResource(CUSTOMWATERFLOWING_PNG); if (is == null) { is = tpl.openTPResource("anim/" + CUSTOMWATERFLOWING_PNG); } if (is != null) { loadImage(is, IMG_CUSTOMWATERMOVING); tpl.closeResource(is); patchTextureWithImage(IMG_CUSTOMWATERMOVING, TILEINDEX_MOVINGWATER); } /* Loop through dynamic files */ for(int i = 0; i < addonfiles.size(); i++) { DynamicTileFile dtf = addonfiles.get(i); processDynamicImage(i, dtf.format); } } catch (IOException iox) { Log.severe("Error loading texture pack", iox); } finally { if (is != null) { try { is.close(); } catch (IOException iox) {} is = null; } tpl.close(); } } /** * Copy subimage from portions of given image * @param img_id - image ID of raw image * @param from_x - source top-left X * @param from_y - source top-left Y * @param to_x - destination top-left X * @param to_y - destination top-left Y * @param width - width to copy * @param height - height to copy * @param dest_argb - destination tile buffer * @param dest_width - width of destination tile buffer */ private void copySubimageFromImage(int img_id, int from_x, int from_y, int to_x, int to_y, int width, int height, int[] dest_argb, int dest_width) { for(int h = 0; h < height; h++) { System.arraycopy(imgs[img_id].argb, (h+from_y)*imgs[img_id].width + from_x, dest_argb, dest_width*(h+to_y) + to_x, width); } } private enum HandlePos { CENTER, LEFT, RIGHT, NONE, LEFTFRONT, RIGHTFRONT }; /** * Make chest side image (based on chest and largechest layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 64 high) * @param width - width to copy (scaled based on 64 high) * @param dest_x - destination X (scaled based on 64 high) * @param handlepos - handle position (CENTER, LEFT, LEFTFRONT, RIGHT, RIGHTFRONT, or NONE) */ private void makeChestSideImage(int img_id, int dest_idx, int src_x, int width, int dest_x, HandlePos handlepos) { if(dest_idx <= 0) return; int mult = imgs[img_id].height / 64; /* Nominal height for chest images is 64 */ int[] tile = new int[16 * 16 * mult * mult]; /* Make image */ /* Copy top part */ copySubimageFromImage(img_id, src_x * mult, 14 * mult, dest_x * mult, 2 * mult, width * mult, 5 * mult, tile, 16 * mult); /* Copy bottom part */ copySubimageFromImage(img_id, src_x * mult, 34 * mult, dest_x * mult, 7 * mult, width * mult, 9 * mult, tile, 16 * mult); /* Handle the handle image */ if(handlepos == HandlePos.CENTER) { /* Middle */ copySubimageFromImage(img_id, 1 * mult, 1 * mult, 7 * mult, 4 * mult, 2 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.LEFT) { /* left edge */ copySubimageFromImage(img_id, 3 * mult, 1 * mult, 0 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.LEFTFRONT) { /* left edge - front of handle */ copySubimageFromImage(img_id, 2 * mult, 1 * mult, 0 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.RIGHT) { /* Right */ copySubimageFromImage(img_id, 0 * mult, 1 * mult, 15 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.RIGHTFRONT) { /* Right - front of handle */ copySubimageFromImage(img_id, 1 *
mult, 1 * mult, 15 * mult, 4 * mult, 1 * mult, 4 * mult, tile, 16 * mult); } /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(16*mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } /** * Make chest top/bottom image (based on chest and largechest layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 64 high) * @param src_y - starting Y of source (scaled based on 64 high) * @param width - width to copy (scaled based on 64 high) * @param dest_x - destination X (scaled based on 64 high) * @param handlepos - handle position (CENTER, LEFT (right-top tile), or RIGHT (left-top tile)) */ private void makeChestTopBottomImage(int img_id, int dest_idx, int src_x, int src_y, int width, int dest_x, HandlePos handlepos) { if(dest_idx <= 0) return; int mult = imgs[img_id].height / 64; /* Nominal height for chest images is 64 */ int[] tile = new int[16 * 16 * mult * mult]; /* Make image */ copySubimageFromImage(img_id, src_x * mult, src_y * mult, dest_x * mult, 1 * mult, width * mult, 14 * mult, tile, 16 * mult); /* Handle the handle image */ if(handlepos == HandlePos.CENTER) { /* Middle */ copySubimageFromImage(img_id, 1 * mult, 0, 7 * mult, 15 * mult, 2 * mult, 1 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.LEFT) { /* left edge */ copySubimageFromImage(img_id, 2 * mult, 0, 0 * mult, 15 * mult, 1 * mult, 1 * mult, tile, 16 * mult); } else if(handlepos == HandlePos.RIGHT) { /* Right */ copySubimageFromImage(img_id, 1 * mult, 0, 15 * mult, 15 * mult, 1 * mult, 1 * mult, tile, 16 * mult); } /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(16*mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } /** * Patch tiles based on image with chest-style layout */ private void patchChestImages(int img_id, int tile_top, int tile_bottom, int tile_front, int tile_back, int tile_left, int tile_right) { makeChestSideImage(img_id, tile_front, 14, 14, 1, HandlePos.CENTER); makeChestSideImage(img_id, tile_back, 42, 14, 1, HandlePos.NONE); makeChestSideImage(img_id, tile_left, 0, 14, 1, HandlePos.RIGHT); makeChestSideImage(img_id, tile_right, 28, 14, 1, HandlePos.LEFT); makeChestTopBottomImage(img_id, tile_top, 14, 0, 14, 1, HandlePos.CENTER); makeChestTopBottomImage(img_id, tile_bottom, 28, 19, 14, 1, HandlePos.CENTER); } /** * Patch tiles based on image with large-chest-style layout */ private void patchLargeChestImages(int img_id, int tile_topright, int tile_topleft, int tile_bottomright, int tile_bottomleft, int tile_right, int tile_left, int tile_frontright, int tile_frontleft, int tile_backright, int tile_backleft) { makeChestSideImage(img_id, tile_frontleft, 14, 15, 1, HandlePos.RIGHTFRONT); makeChestSideImage(img_id, tile_frontright, 29, 15, 0, HandlePos.LEFTFRONT); makeChestSideImage(img_id, tile_left, 0, 14, 1, HandlePos.RIGHT); makeChestSideImage(img_id, tile_right, 44, 14, 1, HandlePos.LEFT); makeChestSideImage(img_id, tile_backright, 58, 15, 1, HandlePos.NONE); makeChestSideImage(img_id, tile_backleft, 73, 15, 0, HandlePos.NONE); makeChestTopBottomImage(img_id, tile_topleft, 14, 0, 15, 1, HandlePos.RIGHT); makeChestTopBottomImage(img_id, tile_topright, 29, 0, 15, 0, HandlePos.LEFT); makeChestTopBottomImage(img_id, tile_bottomleft, 34, 19, 15, 1, HandlePos.RIGHT); makeChestTopBottomImage(img_id, tile_bottomright, 49, 19, 15, 0, HandlePos.LEFT); } /** * Make
sign image (based on sign layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 32 high) * @param src_y - starting Y of source (scaled based on 32 high) * @param width - width to copy (scaled based on 32 high) * @param height - height to copy (scaled based on 32 high) */ private void makeSignImage(int img_id, int dest_idx, int src_x, int src_y, int width, int height) { int mult = imgs[img_id].height / 32; /* Nominal height for sign images is 32 */ int[] tile = new int[24 * 24 * mult * mult]; /* Make image (all are 24x24) */ copySubimageFromImage(img_id, src_x * mult, src_y * mult, 0, (24-height)*mult, width * mult, height * mult, tile, 24 * mult); /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(24*mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } private void patchSignImages(int img, int sign_front, int sign_back, int sign_top, int sign_bottom, int sign_left, int sign_right, int post_front, int post_back, int post_left, int post_right) { /* Load images at lower left corner of each tile */ makeSignImage(img, sign_front, 2, 2, 24, 12); makeSignImage(img, sign_back, 28, 2, 24, 12); makeSignImage(img, sign_top, 2, 0, 24, 2); makeSignImage(img, sign_left, 0, 2, 2, 12); makeSignImage(img, sign_right, 26, 2, 2, 12); makeSignImage(img, sign_bottom, 26, 0, 24, 2); makeSignImage(img, post_front, 0, 16, 2, 14); makeSignImage(img, post_right, 2, 16, 2, 14); makeSignImage(img, post_back, 4, 16, 2, 14); makeSignImage(img, post_left, 6, 16, 2, 14); } /** * Make face image (based on skin layouts) * @param img_id - source image ID * @param dest_idx - destination tile index * @param src_x - starting X of source (scaled based on 32 high) * @param src_y - starting Y of source (scaled based on 32 high) */ private void makeFaceImage(int img_id, int dest_idx, int src_x, int src_y) { int mult = imgs[img_id].width / 64; /* Nominal width for skin images is 64 */ int[] tile = new int[8 * 8 * mult * mult]; /* Make image (all are 8x8) */ copySubimageFromImage(img_id, src_x * mult, src_y * mult, 0, 0, 8 * mult, 8 * mult, tile, 8 * mult); /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(8 * mult, native_scale, tile, new_argb); terrain_argb[dest_idx] = new_argb; } private void patchSkinImages(int img, int face_front, int face_left, int face_right, int face_back, int face_top, int face_bottom) { makeFaceImage(img, face_front, 8, 8); makeFaceImage(img, face_left, 16, 8); makeFaceImage(img, face_right, 0, 8); makeFaceImage(img, face_back, 24, 8); makeFaceImage(img, face_top, 8, 0); makeFaceImage(img, face_bottom, 16, 0); } private void patchCustomImages(int img_id, int[] imgids, List<CustomTileRec> recs, int xcnt, int ycnt) { int mult = imgs[img_id].height / (ycnt * 16); /* Compute scale based on nominal tile count vertically (ycnt * 16) */ for(int i = 0; i < imgids.length; i++) { if(imgids[i] <= 0) continue; CustomTileRec ctr = recs.get(i); if(ctr == null) continue; int[] tile = new int[16 * 16 * mult * mult]; /* Make image */ copySubimageFromImage(img_id, ctr.srcx * mult, ctr.srcy * mult, ctr.targetx * mult, ctr.targety * mult, ctr.width * mult, ctr.height * mult, tile, 16 * mult); /* Put scaled result into tile buffer */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(16*mult, native_scale, tile, new_argb); terrain_argb[imgids[i]] = new_argb; } }
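/* Illustrative walk-through of the CUSTOM path above (hypothetical file name and tile IDs, not from any shipped definition file): a line such as texturefile:id=mytile,filename=custom/mytile.png,xcount=2,ycount=2,format=custom,tile0=0:0/16:16/0:0 is parsed by findOrAddDynamicTileFile() into one CustomTileRec (srcx=0, srcy=0, width=16, height=16, targetx=0, targety=0); patchCustomImages() then copies that region, scaled by the file's height-based multiplier, into the dynamic tile assigned to tile0 */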
/* Copy texture pack */ private TexturePack(TexturePack tp) { this.terrain_argb = new int[tp.terrain_argb.length][]; System.arraycopy(tp.terrain_argb, 0, this.terrain_argb, 0, this.terrain_argb.length); this.native_scale = tp.native_scale; this.water_toned_op = tp.water_toned_op; this.ctm = tp.ctm; this.imgs = tp.imgs; this.hasBlockColoring = tp.hasBlockColoring; this.blockColoring = tp.blockColoring; } /* Load terrain.png */ private void loadTerrainPNG(InputStream is) throws IOException { int i, j; /* Load image */ ImageIO.setUseCache(false); BufferedImage img = ImageIO.read(is); if(img == null) { throw new FileNotFoundException(); } terrain_argb = new int[TILETABLE_LEN][]; int[] blank; /* If we're using a pre-1.5 terrain.png */ if(img.getWidth() >= 256) { native_scale = img.getWidth() / 16; blank = new int[native_scale*native_scale]; for(i = 0; i < 256; i++) { terrain_argb[i] = new int[native_scale*native_scale]; img.getRGB((i & 0xF)*native_scale, (i>>4)*native_scale, native_scale, native_scale, terrain_argb[i], 0, native_scale); } /* Now, load extra scaled images */ for(i = 256; i < terrain_map.length; i++) { terrain_argb[i] = blank; String fn = getBlockFileName(i); if (fn == null) continue; DynamicTileFile dtf = addonfilesbyname.get(fn); if (dtf == null) continue; LoadedImage li = imgs[dtf.idx + IMG_CNT]; if(li != null) { terrain_argb[i] = new int[native_scale * native_scale]; scaleTerrainPNGSubImage(li.width, native_scale, li.argb, terrain_argb[i]); } } } else { /* Else, use v1.5 tile files */ native_scale = 16; /* Loop through textures - find biggest one */ for(i = 0; i < terrain_map.length; i++) { String fn = getBlockFileName(i); if (fn == null) continue; DynamicTileFile dtf = addonfilesbyname.get(fn); if (dtf == null) continue; LoadedImage li = imgs[dtf.idx+IMG_CNT]; if(li != null) { if(native_scale < li.width) native_scale = li.width; } } blank = new int[native_scale*native_scale]; /* Now, load scaled images */ for(i = 0; i < terrain_map.length; i++) { terrain_argb[i] = blank; String fn = getBlockFileName(i); if (fn == null) continue; DynamicTileFile dtf = addonfilesbyname.get(fn); if (dtf == null) continue; LoadedImage li = imgs[dtf.idx + IMG_CNT]; if(li != null) { terrain_argb[i] = new int[native_scale * native_scale]; scaleTerrainPNGSubImage(li.width, native_scale, li.argb, terrain_argb[i]); } } } for(i = terrain_map.length; i < TILETABLE_LEN; i++) { terrain_argb[i] = blank; } /* Now, build redstone textures with active wire color (since we're not messing with that) */ Color tc = new Color(); for(i = 0; i < native_scale*native_scale; i++) { if(terrain_argb[TILEINDEX_REDSTONE_NSEW_TONE][i] != 0) { /* Overlay NSEW redstone texture with toned wire color */ tc.setARGB(terrain_argb[TILEINDEX_REDSTONE_NSEW_TONE][i]); tc.blendColor(0xFFC00000); /* Blend in red */ terrain_argb[TILEINDEX_REDSTONE_NSEW][i] = tc.getARGB(); } if(terrain_argb[TILEINDEX_REDSTONE_EW_TONE][i] != 0) { /* Overlay EW redstone texture with toned wire color */ tc.setARGB(terrain_argb[TILEINDEX_REDSTONE_EW_TONE][i]); tc.blendColor(0xFFC00000); /* Blend in red */ terrain_argb[TILEINDEX_REDSTONE_EW][i] = tc.getARGB(); } } /* Build extended piston side texture - take top 1/4 of piston side, use to make piston extension */ terrain_argb[TILEINDEX_PISTONEXTSIDE] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_PISTONSIDE], 0, terrain_argb[TILEINDEX_PISTONEXTSIDE], 0, native_scale * native_scale / 4); for(i = 0; i < native_scale/4; i++) { for(j = 0; j < (3*native_scale/4); j++) {
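/* Transpose pixel (row i, col j) of the piston side's top quarter into a vertical extension shaft of width native_scale/4, centered horizontally starting at column 3*native_scale/8 */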
terrain_argb[TILEINDEX_PISTONEXTSIDE][native_scale*(native_scale/4 + j) + (3*native_scale/8 + i)] = terrain_argb[TILEINDEX_PISTONSIDE][native_scale*i + j]; } } /* Build piston side while extended (cut off top 1/4, replace with rotated top for extension) */ terrain_argb[TILEINDEX_PISTONSIDE_EXT] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_PISTONSIDE], native_scale*native_scale/4, terrain_argb[TILEINDEX_PISTONSIDE_EXT], native_scale*native_scale/4, 3 * native_scale * native_scale / 4); /* Copy bottom 3/4 */ for(i = 0; i < native_scale/4; i++) { for(j = 3*native_scale/4; j < native_scale; j++) { terrain_argb[TILEINDEX_PISTONSIDE_EXT][native_scale*(j - 3*native_scale/4) + (3*native_scale/8 + i)] = terrain_argb[TILEINDEX_PISTONSIDE][native_scale*i + j]; } } /* Build glass pane top in NSEW config (we use model to clip it) */ terrain_argb[TILEINDEX_PANETOP_X] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_GLASSPANETOP], 0, terrain_argb[TILEINDEX_PANETOP_X], 0, native_scale*native_scale); for(i = native_scale*7/16; i < native_scale*9/16; i++) { for(j = 0; j < native_scale; j++) { terrain_argb[TILEINDEX_PANETOP_X][native_scale*i + j] = terrain_argb[TILEINDEX_PANETOP_X][native_scale*j + i]; } } /* Build air frame with eye overlay */ terrain_argb[TILEINDEX_AIRFRAME_EYE] = new int[native_scale*native_scale]; System.arraycopy(terrain_argb[TILEINDEX_AIRFRAME], 0, terrain_argb[TILEINDEX_AIRFRAME_EYE], 0, native_scale*native_scale); for(i = native_scale/4; i < native_scale*3/4; i++) { for(j = native_scale/4; j < native_scale*3/4; j++) { terrain_argb[TILEINDEX_AIRFRAME_EYE][native_scale*i + j] = terrain_argb[TILEINDEX_EYEOFENDER][native_scale*i + j]; } } img.flush(); } /* Load image into image array */ private void loadImage(InputStream is, int idx) throws IOException { BufferedImage img = null; /* Load image */ if(is != null) { ImageIO.setUseCache(false); img = ImageIO.read(is); if(img == null) { throw new FileNotFoundException(); } } if(idx >= imgs.length) { LoadedImage[] newimgs = new LoadedImage[idx+1]; System.arraycopy(imgs, 0, newimgs, 0, imgs.length); imgs = newimgs; } imgs[idx] = new LoadedImage(); if (img != null) { imgs[idx].width = img.getWidth(); imgs[idx].height = img.getHeight(); imgs[idx].argb = new int[imgs[idx].width * imgs[idx].height]; img.getRGB(0, 0, imgs[idx].width, imgs[idx].height, imgs[idx].argb, 0, imgs[idx].width); img.flush(); } else { imgs[idx].width = 16; imgs[idx].height = 16; imgs[idx].argb = new int[imgs[idx].width * imgs[idx].height]; } } /* Process dynamic texture files, and patch into terrain_argb */ private void processDynamicImage(int idx, TileFileFormat format) { DynamicTileFile dtf = addonfiles.get(idx); /* Get tile file definition */ LoadedImage li = imgs[idx+IMG_CNT]; if (li == null) return; switch(format) { case GRID: /* If grid format tile file */ int dim = li.width / dtf.tilecnt_x; /* Dimension of each tile */ int old_argb[] = new int[dim*dim]; for(int x = 0; x < dtf.tilecnt_x; x++) { for(int y = 0; y < dtf.tilecnt_y; y++) { int tileidx = dtf.tile_to_dyntile[y*dtf.tilecnt_x + x]; if (tileidx < 0) continue; if((tileidx >= terrain_map.length) || (terrain_map[tileidx] == null)) { /* dynamic ID?
*/ /* Copy source tile */ for(int j = 0; j < dim; j++) { System.arraycopy(li.argb, (y*dim+j)*li.width + (x*dim), old_argb, j*dim, dim); } /* Rescale to match rest of terrain PNG */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(dim, native_scale, old_argb, new_argb); terrain_argb[tileidx] = new_argb; } } } break; case CHEST: patchChestImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_CHEST_TOP], dtf.tile_to_dyntile[TILEINDEX_CHEST_BOTTOM], dtf.tile_to_dyntile[TILEINDEX_CHEST_FRONT], dtf.tile_to_dyntile[TILEINDEX_CHEST_BACK], dtf.tile_to_dyntile[TILEINDEX_CHEST_LEFT], dtf.tile_to_dyntile[TILEINDEX_CHEST_RIGHT]); break; case BIGCHEST: patchLargeChestImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_TOPRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_TOPLEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BOTTOMRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BOTTOMLEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_RIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_LEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_FRONTRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_FRONTLEFT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BACKRIGHT], dtf.tile_to_dyntile[TILEINDEX_BIGCHEST_BACKLEFT]); break; case SIGN: patchSignImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_SIGN_FRONT], dtf.tile_to_dyntile[TILEINDEX_SIGN_BACK], dtf.tile_to_dyntile[TILEINDEX_SIGN_TOP], dtf.tile_to_dyntile[TILEINDEX_SIGN_BOTTOM], dtf.tile_to_dyntile[TILEINDEX_SIGN_LEFTSIDE], dtf.tile_to_dyntile[TILEINDEX_SIGN_RIGHTSIDE], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTFRONT], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTBACK], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTLEFT], dtf.tile_to_dyntile[TILEINDEX_SIGN_POSTRIGHT]); break; case SKIN: patchSkinImages(idx+IMG_CNT, dtf.tile_to_dyntile[TILEINDEX_SKIN_FACEFRONT], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACELEFT], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACERIGHT], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACEBACK], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACETOP], dtf.tile_to_dyntile[TILEINDEX_SKIN_FACEBOTTOM]); break; case CUSTOM: patchCustomImages(idx+IMG_CNT, dtf.tile_to_dyntile, dtf.cust, dtf.tilecnt_x, dtf.tilecnt_y); break; case TILESET: // TODO break; default: break; } } /* Load biome shading image into image array */ private void loadBiomeShadingImage(InputStream is, int idx) throws IOException { loadImage(is, idx); /* Get image */ LoadedImage li = imgs[idx]; if (li.width != 256) { /* Required to be 256 x 256 */ int[] scaled = new int[256*256]; scaleTerrainPNGSubImage(li.width, 256, li.argb, scaled); li.argb = scaled; li.width = 256; li.height = 256; } /* Get trivial color for biome-shading image */ int clr = li.argb[li.height*li.width*3/4 + li.width/2]; boolean same = true; for(int j = 0; same && (j < li.height); j++) { for(int i = 0; same && (i <= j); i++) { if(li.argb[li.width*j+i] != clr) same = false; } } /* All the same - no biome lookup needed */ if(same) { li.trivial_color = clr; } else { /* Else, calculate color average for lower left quadrant */ int[] clr_scale = new int[4]; scaleTerrainPNGSubImage(li.width, 2, li.argb, clr_scale); li.trivial_color = clr_scale[2]; } } /* Patch image into texture table */ private void patchTextureWithImage(int image_idx, int block_idx) { /* Now, patch in to block table */ int new_argb[] = new int[native_scale*native_scale]; scaleTerrainPNGSubImage(imgs[image_idx].width, native_scale, imgs[image_idx].argb, new_argb); terrain_argb[block_idx] = new_argb; } /* Get texture pack directory */ private static File getTexturePackDirectory(DynmapCore core) { return 
new File(core.getDataFolder(), "texturepacks"); } /** * Resample terrain pack for given scale, and return copy using that scale */ public TexturePack resampleTexturePack(int scale) { synchronized(scaledlock) { if(scaled_textures == null) scaled_textures = new HashMap<Integer, TexturePack>(); TexturePack stp = scaled_textures.get(scale); if(stp != null) return stp; stp = new TexturePack(this); /* Make copy */ /* Scale terrain.png, if needed */ if(stp.native_scale != scale) { stp.native_scale = scale; scaleTerrainPNG(stp); } /* Remember it */ scaled_textures.put(scale, stp); return stp; } } /** * Scale out terrain_argb into the terrain_argb of the provided destination, matching the scale of that destination * @param tp - destination texture pack to receive the scaled tiles */ private void scaleTerrainPNG(TexturePack tp) { tp.terrain_argb = new int[terrain_argb.length][]; /* Terrain.png is 16x16 array of images : process one at a time */ for(int idx = 0; idx < terrain_argb.length; idx++) { tp.terrain_argb[idx] = new int[tp.native_scale*tp.native_scale]; scaleTerrainPNGSubImage(native_scale, tp.native_scale, terrain_argb[idx], tp.terrain_argb[idx]); } /* Special case - some textures are used as masks - need pure alpha (00 or FF) */ makeAlphaPure(tp.terrain_argb[TILEINDEX_GRASSMASK]); /* Grass side mask */ } public static void scaleTerrainPNGSubImage(int srcscale, int destscale, int[] src_argb, int[] dest_argb) { int nativeres = srcscale; int res = destscale; Color c = new Color(); /* Same size, so just copy */ if(res == nativeres) { System.arraycopy(src_argb, 0, dest_argb, 0, dest_argb.length); } /* If we're scaling larger source pixels into smaller pixels, each destination pixel * receives input from 1 or 2 source pixels on each axis */ else if(res > nativeres) { int weights[] = new int[res]; int offsets[] = new int[res]; /* LCM of resolutions is used as length of line (res * nativeres) * Each native block is (res) long, each scaled block is (nativeres) long * Each scaled block overlaps 1 or 2 native blocks: starting with native block 'offsets[]' with * 'weights[]' of its (res) width in the first, and the rest in the second */ for(int v = 0, idx = 0; v < res*nativeres; v += nativeres, idx++) { offsets[idx] = (v/res); /* Get index of the first native block we draw from */ if((v+nativeres-1)/res == offsets[idx]) { /* If scaled block ends in same native block */ weights[idx] = nativeres; } else { /* Else, see how much is in first one */ weights[idx] = (offsets[idx]*res + res) - v; } } /* Now, use weights and indices to fill in scaled map */ for(int y = 0; y < res; y++) { int ind_y = offsets[y]; int wgt_y = weights[y]; for(int x = 0; x < res; x++) { int ind_x = offsets[x]; int wgt_x = weights[x]; double accum_red = 0; double accum_green = 0; double accum_blue = 0; double accum_alpha = 0; for(int xx = 0; xx < 2; xx++) { int wx = (xx==0)?wgt_x:(nativeres-wgt_x); if(wx == 0) continue; for(int yy = 0; yy < 2; yy++) { int wy = (yy==0)?wgt_y:(nativeres-wgt_y); if(wy == 0) continue; /* Accumulate */ c.setARGB(src_argb[(ind_y+yy)*nativeres + ind_x + xx]); int w = wx * wy; double a = (double)w * (double)c.getAlpha(); accum_red += c.getRed() * a; accum_green += c.getGreen() * a; accum_blue += c.getBlue() * a; accum_alpha += a; } } double newalpha = accum_alpha; if(newalpha == 0.0) newalpha = 1.0; /* Generate weighted components into color */ c.setRGBA((int)(accum_red / newalpha), (int)(accum_green / newalpha), (int)(accum_blue / newalpha), (int)(accum_alpha / (nativeres*nativeres))); dest_argb[(y*res) + x] = c.getARGB(); } } } else { /* nativeres > res */ int
weights[] = new int[nativeres]; int offsets[] = new int[nativeres]; /* LCM of resolutions is used as length of line (res * nativeres) * Each native block is (res) long, each scaled block is (nativeres) long * Each native block overlaps 1 or 2 scaled blocks: starting with scaled block 'offsets[]' with * 'weights[]' of its (res) width in the first, and the rest in the second */ for(int v = 0, idx = 0; v < res*nativeres; v += res, idx++) { offsets[idx] = (v/nativeres); /* Get index of the first scaled block we draw to */ if((v+res-1)/nativeres == offsets[idx]) { /* If native block ends in same scaled block */ weights[idx] = res; } else { /* Else, see how much is in first one */ weights[idx] = (offsets[idx]*nativeres + nativeres) - v; } } double accum_red[] = new double[res*res]; double accum_green[] = new double[res*res]; double accum_blue[] = new double[res*res]; double accum_alpha[] = new double[res*res]; /* Now, use weights and indices to fill in scaled map */ for(int y = 0; y < nativeres; y++) { int ind_y = offsets[y]; int wgt_y = weights[y]; for(int x = 0; x < nativeres; x++) { int ind_x = offsets[x]; int wgt_x = weights[x]; c.setARGB(src_argb[(y*nativeres) + x]); for(int xx = 0; xx < 2; xx++) { int wx = (xx==0)?wgt_x:(res-wgt_x); if(wx == 0) continue; for(int yy = 0; yy < 2; yy++) { int wy = (yy==0)?wgt_y:(res-wgt_y); if(wy == 0) continue; double w = wx * wy; double a = w * c.getAlpha(); accum_red[(ind_y+yy)*res + (ind_x+xx)] += c.getRed() * a; accum_green[(ind_y+yy)*res + (ind_x+xx)] += c.getGreen() * a; accum_blue[(ind_y+yy)*res + (ind_x+xx)] += c.getBlue() * a; accum_alpha[(ind_y+yy)*res + (ind_x+xx)] += a; } } } } /* Produce normalized scaled values */ for(int y = 0; y < res; y++) { for(int x = 0; x < res; x++) { int off = (y*res) + x; double aa = accum_alpha[off]; if(aa == 0.0) aa = 1.0; c.setRGBA((int)(accum_red[off]/aa), (int)(accum_green[off]/aa), (int)(accum_blue[off]/aa), (int)(accum_alpha[off] / (nativeres*nativeres))); dest_argb[y*res + x] = c.getARGB(); } } } } private static void addFiles(List<String> tsfiles, List<String> txfiles, File dir, String path) { File[] listfiles = dir.listFiles(); if(listfiles == null) return; for(File f : listfiles) { String fn = f.getName(); if(fn.equals(".") || (fn.equals(".."))) continue; if(f.isFile()) { if(fn.endsWith("-texture.txt")) { txfiles.add(path + fn); } if(fn.endsWith("-tilesets.txt")) { tsfiles.add(path + fn); } } else if(f.isDirectory()) { addFiles(tsfiles, txfiles, f, path + f.getName() + "/"); } } } /** * Load texture pack mappings */ public static void loadTextureMapping(DynmapCore core, ConfigurationNode config) { File datadir = core.getDataFolder(); /* Start clean with texture packs - need to be loaded after mapping */ resetFiles(); /* Initialize map with blank map for all entries */ HDTextureMap.initializeTable(); /* Load block models */ InputStream in = TexturePack.class.getResourceAsStream("/texture.txt"); if(in != null) { loadTextureFile(in, "texture.txt", config, core, "core"); if(in != null) { try { in.close(); } catch (IOException x) {} in = null; } } else Log.severe("Error loading texture.txt"); File renderdir = new File(datadir, "renderdata"); ArrayList<String> tsfiles = new ArrayList<String>(); ArrayList<String> txfiles = new ArrayList<String>(); addFiles(tsfiles, txfiles, renderdir, ""); for(String fname : tsfiles) { File custom = new File(renderdir, fname); if(custom.canRead()) { try { in = new FileInputStream(custom); loadTileSetsFile(in, custom.getPath(), config, core, fname.substring(0, 
fname.indexOf("-tilesets.txt"))); } catch (IOException iox) { Log.severe("Error loading " + custom.getPath() + " - " + iox); } finally { if(in != null) { try { in.close(); } catch (IOException x) {} in = null; } } } } for(String fname : txfiles) { File custom = new File(renderdir, fname); if(custom.canRead()) { try { in = new FileInputStream(custom); loadTextureFile(in, custom.getPath(), config, core, fname.substring(0, fname.indexOf("-texture.txt"))); } catch (IOException iox) { Log.severe("Error loading " + custom.getPath() + " - " + iox); } finally { if(in != null) { try { in.close(); } catch (IOException x) {} in = null; } } } } /* Finish processing of texture maps */ processTextureMaps(); /* Check integrity of texture mappings versus models */ for(int blkiddata = 0; blkiddata < HDTextureMap.texmaps.length; blkiddata++) { int blkid = (blkiddata >> 4); int blkdata = blkiddata & 0xF; HDTextureMap tm = HDTextureMap.texmaps[blkiddata]; int cnt = HDBlockModels.getNeededTextureCount(blkid, blkdata); if(cnt > tm.faces.length){ Log.severe("Block ID " + blkid + ":" + blkdata + " - not enough textures for faces (" + cnt + " > " + tm.faces.length + ")"); int[] newfaces = new int[cnt]; System.arraycopy(tm.faces, 0, newfaces, 0, tm.faces.length); for(int i = tm.faces.length; i < cnt; i++) { newfaces[i] = TILEINDEX_BLANK; } } } } private static Integer getIntValue(Map<String,Integer> vars, String val) throws NumberFormatException { if(Character.isLetter(val.charAt(0))) { int off = val.indexOf('+'); int offset = 0; if (off > 0) { offset = Integer.valueOf(val.substring(off+1)); val = val.substring(0, off); } Integer v = vars.get(val); if(v == null) throw new NumberFormatException("invalid ID - " + val); if((offset != 0) && (v.intValue() > 0)) v = v.intValue() + offset; return v; } else { return Integer.valueOf(val); } } private static int parseTextureIndex(HashMap<String,Integer> filetoidx, int srctxtid, String val) throws NumberFormatException { int off = val.indexOf(':'); int txtid = -1; if(off > 0) { String txt = val.substring(off+1); if(filetoidx.containsKey(txt)) { srctxtid = filetoidx.get(txt); } else { throw new NumberFormatException("Unknown attribute: " + txt); } txtid = Integer.valueOf(val.substring(0, off)); } else { txtid = Integer.valueOf(val); } /* Shift function code from x1000 to x1000000 for internal processing */ int funcid = (txtid / COLORMOD_MULT_FILE); txtid = txtid - (COLORMOD_MULT_FILE * funcid); /* If we have source texture, need to map values to dynamic ids */ if((srctxtid >= 0) && (txtid >= 0)) { /* Map to assigned ID in global tile table: preserve modifier */ txtid =findOrAddDynamicTile(srctxtid, txtid); } return txtid + (COLORMOD_MULT_INTERNAL * funcid); } /** * Load texture pack mappings from tilesets.txt file */ private static void loadTileSetsFile(InputStream txtfile, String txtname, ConfigurationNode config, DynmapCore core, String blockset) { LineNumberReader rdr = null; DynamicTileFile tfile = null; try { String line; rdr = new LineNumberReader(new InputStreamReader(txtfile)); while((line = rdr.readLine()) != null) { if(line.startsWith("#")) { } else if(line.startsWith("tileset:")) { /* Start of tileset definition */ line = line.substring(line.indexOf(':')+1); int xdim = 16, ydim = 16; String fname = null; String setdir = null; String[] toks = line.split(","); for(String tok : toks) { String[] v = tok.split("="); if(v.length < 2) continue; if(v[0].equals("xcount")) { xdim = Integer.parseInt(v[1]); } else if(v[0].equals("ycount")) { ydim = Integer.parseInt(v[1]); } 
else if(v[0].equals("setdir")) { setdir = v[1]; } else if(v[0].equals("filename")) { fname = v[1]; } } if ((fname != null) && (setdir != null)) { /* Register tile file */ int fid = findOrAddDynamicTileFile(fname, xdim, ydim, TileFileFormat.TILESET, new String[0]); tfile = addonfiles.get(fid); if (tfile == null) { Log.severe("Error registering tile set " + fname + " at " + rdr.getLineNumber() + " of " + txtname); return; } /* Initialize tile name map and set directory path */ tfile.tilenames = new String[tfile.tile_to_dyntile.length]; tfile.setdir = setdir; } else { Log.severe("Error defining tile set at " + rdr.getLineNumber() + " of " + txtname); return; } } else if(Character.isDigit(line.charAt(0))) { /* Starts with digit? tile mapping */ int split = line.indexOf('-'); /* Find first dash */ if(split < 0) continue; String id = line.substring(0, split).trim(); String name = line.substring(split+1).trim(); String[] coord = id.split(","); int idx = -1; if(coord.length == 2) { /* If x,y */ idx = (Integer.parseInt(coord[1]) * tfile.tilecnt_x) + Integer.parseInt(coord[0]); } else if(coord.length == 1) { /* Just index */ idx = Integer.parseInt(coord[0]); } if((idx >= 0) && (idx < tfile.tilenames.length)) { tfile.tilenames[idx] = name; } else { Log.severe("Bad tile index - line " + rdr.getLineNumber() + " of " + txtname); } } } } catch (IOException iox) { Log.severe("Error reading " + txtname + " - " + iox.toString()); } catch (NumberFormatException nfx) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + nfx.getMessage()); } finally { if(rdr != null) { try { rdr.close(); rdr = null; } catch (IOException e) { } } } } /** * Load texture pack mappings from texture.txt file */ private static void loadTextureFile(InputStream txtfile, String txtname, ConfigurationNode config, DynmapCore core, String blockset) { LineNumberReader rdr = null; int cnt = 0; HashMap<String,Integer> filetoidx = new HashMap<String,Integer>(); HashMap<String,Integer> varvals = new HashMap<String,Integer>(); boolean mod_cfg_needed = false; String modname = null; try { String line; rdr = new LineNumberReader(new InputStreamReader(txtfile)); while((line = rdr.readLine()) != null) { if(line.startsWith("block:")) { ArrayList<Integer> blkids = new ArrayList<Integer>(); int databits = -1; int srctxtid = -1; int faces[] = new int[] { TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK, TILEINDEX_BLANK }; int txtidx[] = new int[] { -1, -1, -1, -1, -1, -1 }; byte layers[] = null; line = line.substring(6); BlockTransparency trans = BlockTransparency.OPAQUE; int colorMult = 0; boolean stdrot = false; // Legacy top/bottom rotation CustomColorMultiplier custColorMult = null; String[] args = line.split(","); for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; else if(av[0].equals("txtid")) { if(filetoidx.containsKey(av[1])) srctxtid = filetoidx.get(av[1]); else Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": bad texture " + av[1]); } } boolean userenderdata = false; for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; if(av[0].equals("id")) { blkids.add(getIntValue(varvals, av[1])); } else if(av[0].equals("data")) { if(databits < 0) databits = 0; if(av[1].equals("*")) databits = 0xFFFF; else databits |= (1 << getIntValue(varvals,av[1])); } else if(av[0].equals("top") || av[0].equals("y-") || av[0].equals("face1")) { faces[BlockStep.Y_MINUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, 
av[1]); } else if(av[0].equals("bottom") || av[0].equals("y+") || av[0].equals("face0")) { faces[BlockStep.Y_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("north") || av[0].equals("x+") || av[0].equals("face4")) { faces[BlockStep.X_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("south") || av[0].equals("x-") || av[0].equals("face5")) { faces[BlockStep.X_MINUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("west") || av[0].equals("z-") || av[0].equals("face3")) { faces[BlockStep.Z_MINUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("east") || av[0].equals("z+") || av[0].equals("face2")) { faces[BlockStep.Z_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].equals("allfaces")) { int id = parseTextureIndex(filetoidx, srctxtid, av[1]); for(int i = 0; i < 6; i++) { faces[i] = id; } } else if(av[0].equals("allsides")) { int id = parseTextureIndex(filetoidx, srctxtid, av[1]); faces[BlockStep.X_PLUS.ordinal()] = id; faces[BlockStep.X_MINUS.ordinal()] = id; faces[BlockStep.Z_PLUS.ordinal()] = id; faces[BlockStep.Z_MINUS.ordinal()] = id; } else if(av[0].equals("topbottom")) { faces[BlockStep.Y_MINUS.ordinal()] = faces[BlockStep.Y_PLUS.ordinal()] = parseTextureIndex(filetoidx, srctxtid, av[1]); } else if(av[0].startsWith("patch")) { int patchid0, patchid1; String idrange = av[0].substring(5); String[] ids = idrange.split("-"); if(ids.length > 1) { patchid0 = Integer.parseInt(ids[0]); patchid1 = Integer.parseInt(ids[1]); } else { patchid0 = patchid1 = Integer.parseInt(ids[0]); } if((patchid0 < 0) || (patchid1 < patchid0)) { Log.severe("Texture mapping has invalid patch index - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname); return; } if(faces.length <= patchid1) { int[] newfaces = new int[patchid1+1]; Arrays.fill(newfaces, TILEINDEX_BLANK); System.arraycopy(faces, 0, newfaces, 0, faces.length); faces = newfaces; int[] newtxtidx = new int[patchid1+1]; Arrays.fill(newtxtidx, -1); System.arraycopy(txtidx, 0, newtxtidx, 0, txtidx.length); txtidx = newtxtidx; } int txtid = parseTextureIndex(filetoidx, srctxtid, av[1]); for(int i = patchid0; i <= patchid1; i++) { faces[i] = txtid; } } else if(av[0].equals("transparency")) { trans = BlockTransparency.valueOf(av[1]); if(trans == null) { trans = BlockTransparency.OPAQUE; Log.severe("Texture mapping has invalid transparency setting - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname); } /* For leaves, base on leaf transparency setting */ if(trans == BlockTransparency.LEAVES) { if(core.getLeafTransparency()) trans = BlockTransparency.TRANSPARENT; else trans = BlockTransparency.OPAQUE; } /* If no water lighting fix */ if((blkids.contains(8) || blkids.contains(9)) && (HDMapManager.waterlightingfix == false)) { trans = BlockTransparency.TRANSPARENT; /* Treat water as transparent if no fix */ } } else if(av[0].equals("userenderdata")) { userenderdata = av[1].equals("true"); } else if(av[0].equals("colorMult")) { colorMult = (int)Long.parseLong(av[1], 16); } else if(av[0].equals("custColorMult")) { try { Class<?> cls = Class.forName(av[1]); custColorMult = (CustomColorMultiplier)cls.newInstance(); } catch (Exception x) { Log.severe("Error loading custom color multiplier - " + av[1] + ": " + x.getMessage()); } } else if(av[0].equals("stdrot")) { stdrot = av[1].equals("true"); } } for(String a : args) { String[] av = a.split("="); if(av.length < 2) 
continue; if(av[0].startsWith("layer")) { if(layers == null) { layers = new byte[faces.length]; Arrays.fill(layers, (byte)-1); } String v[] = av[0].substring(5).split("-"); int id1, id2; id1 = id2 = Integer.parseInt(v[0]); if(v.length > 1) { id2 = Integer.parseInt(v[1]); } byte val = (byte)Integer.parseInt(av[1]); for(; id1 <= id2; id1++) { layers[id1] = val; } } } /* If no data bits, assume all */ if(databits < 0) databits = 0xFFFF; /* If we have everything, build block */ if(blkids.size() > 0) { HDTextureMap map = new HDTextureMap(blkids, databits, faces, layers, trans, userenderdata, colorMult, custColorMult, blockset, stdrot); map.addToTable(); cnt++; } else { Log.severe("Texture mapping missing required parameters - line " + rdr.getLineNumber() + " of " + txtname); } } else if(line.startsWith("addtotexturemap:")) { int srctxtid = -1; String mapid = null; line = line.substring(line.indexOf(':') + 1); String[] args = line.split(","); for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; else if(av[0].equals("txtid")) { if(filetoidx.containsKey(av[1])) srctxtid = filetoidx.get(av[1]); else Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); } else if(av[0].equals("mapid")) { mapid = av[1]; } } if(mapid != null) { for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; if(av[0].startsWith("key:")) { addTextureByKey(mapid, getIntValue(varvals, av[0].substring(4)), parseTextureIndex(filetoidx, srctxtid, av[1])); } } } else { Log.severe("Missing mapid - line " + rdr.getLineNumber() + " of " + txtname); } } else if(line.startsWith("texturemap:")) { ArrayList<Integer> blkids = new ArrayList<Integer>(); int databits = -1; String mapid = null; line = line.substring(line.indexOf(':') + 1); BlockTransparency trans = BlockTransparency.OPAQUE; int colorMult = 0; CustomColorMultiplier custColorMult = null; String[] args = line.split(","); boolean userenderdata = false; for(String a : args) { String[] av = a.split("="); if(av.length < 2) continue; if(av[0].equals("id")) { blkids.add(getIntValue(varvals, av[1])); } else if(av[0].equals("mapid")) { mapid = av[1]; } else if(av[0].equals("data")) { if(databits < 0) databits = 0; if(av[1].equals("*")) databits = 0xFFFF; else databits |= (1 << getIntValue(varvals,av[1])); } else if(av[0].equals("transparency")) { trans = BlockTransparency.valueOf(av[1]); if(trans == null) { trans = BlockTransparency.OPAQUE; Log.severe("Texture mapping has invalid transparency setting - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname); } /* For leaves, base on leaf transparency setting */ if(trans == BlockTransparency.LEAVES) { if(core.getLeafTransparency()) trans = BlockTransparency.TRANSPARENT; else trans = BlockTransparency.OPAQUE; } /* If no water lighting fix */ if((blkids.contains(8) || blkids.contains(9)) && (HDMapManager.waterlightingfix == false)) { trans = BlockTransparency.TRANSPARENT; /* Treat water as transparent if no fix */ } } else if(av[0].equals("userenderdata")) { userenderdata = av[1].equals("true"); } else if(av[0].equals("colorMult")) { colorMult = (int)Long.parseLong(av[1], 16); } else if(av[0].equals("custColorMult")) { try { Class<?> cls = Class.forName(av[1]); custColorMult = (CustomColorMultiplier)cls.newInstance(); } catch (Exception x) { Log.severe("Error loading custom color multiplier - " + av[1] + ": " + x.getMessage()); } } } /* If no data bits, assume all */ if(databits < 0) databits = 0xFFFF; /* If we have everything, build texture map */ if((blkids.size()
> 0) && (mapid != null)) { addTextureIndex(mapid, blkids, databits, trans, userenderdata, colorMult, custColorMult, blockset); } else { Log.severe("Texture map missing required parameters - line " + rdr.getLineNumber() + " of " + txtname); } } else if(line.startsWith("texturefile:")) { line = line.substring(line.indexOf(':')+1); String[] args = line.split(","); int xdim = 16, ydim = 16; String fname = null; String id = null; TileFileFormat fmt = TileFileFormat.GRID; for(String arg : args) { String[] aval = arg.split("="); if(aval.length < 2) continue; if(aval[0].equals("id")) id = aval[1]; else if(aval[0].equals("filename")) fname = aval[1]; else if(aval[0].equals("xcount")) xdim = Integer.parseInt(aval[1]); else if(aval[0].equals("ycount")) ydim = Integer.parseInt(aval[1]); else if(aval[0].equals("format")) { fmt = TileFileFormat.valueOf(aval[1].toUpperCase()); if(fmt == null) { Log.severe("Invalid format type " + aval[1] + " - line " + rdr.getLineNumber() + " of " + txtname); return; } } } if((fname != null) && (id != null)) { /* Register the file */ int fid = findOrAddDynamicTileFile(fname, xdim, ydim, fmt, args); filetoidx.put(id, fid); /* Save lookup */ } else { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); return; } } else if(line.startsWith("#") || line.startsWith(";")) { } else if(line.startsWith("enabled:")) { /* Test if texture file is enabled */ line = line.substring(8).trim(); if(line.startsWith("true")) { /* We're enabled? */ /* Nothing to do - keep processing */ } else if(line.startsWith("false")) { /* Disabled */ return; /* Quit */ } /* If setting is not defined or false, quit */ else if(config.getBoolean(line, false) == false) { return; } else { Log.info(line + " textures enabled"); } } else if(line.startsWith("var:")) { /* Test if variable declaration */ line = line.substring(4).trim(); String args[] = line.split(","); for(int i = 0; i < args.length; i++) { String[] v = args[i].split("="); if(v.length < 2) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); return; } try { int val = Integer.valueOf(v[1]); /* Parse default value */ int parmval = config.getInteger(v[0], val); /* Read value, with applied default */ varvals.put(v[0], parmval); /* And save value */ } catch (NumberFormatException nfx) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + nfx.getMessage()); return; } } } else if(line.startsWith("cfgfile:")) { /* If config file */ File cfgfile = new File(line.substring(8).trim()); ForgeConfigFile cfg = new ForgeConfigFile(cfgfile); if(cfg.load()) { cfg.addBlockIDs(varvals); mod_cfg_needed = false; } } else if(line.startsWith("modname:")) { String[] names = line.substring(8).split(","); boolean found = false; for(String n : names) { if(core.getServer().isModLoaded(n.trim()) == true) { found = true; Log.info(n + " textures enabled"); mod_cfg_needed = true; modname = n.trim(); break; } } if(!found) return; } else if(line.startsWith("biome:")) { line = line.substring(6).trim(); String args[] = line.split(","); int id = 0; int grasscolormult = -1; int foliagecolormult = -1; int watercolormult = -1; double rain = -1.0; double tmp = -1.0; for(int i = 0; i < args.length; i++) { String[] v = args[i].split("="); if(v.length < 2) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname); return; } if(v[0].equals("id")) { id = getIntValue(varvals, v[1]); } else if(v[0].equals("grassColorMult")) { grasscolormult = Integer.valueOf(v[1], 16); } else
if(v[0].equals("foliageColorMult")) { foliagecolormult = Integer.valueOf(v[1], 16); } else if(v[0].equals("waterColorMult")) { watercolormult = Integer.valueOf(v[1], 16); } else if(v[0].equals("temp")) { tmp = Double.parseDouble(v[1]); } else if(v[0].equals("rain")) { rain = Double.parseDouble(v[1]); } } if(id > 0) { BiomeMap b = BiomeMap.byBiomeID(id); /* Find biome */ if(b == null) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + id); } else { if(foliagecolormult != -1) b.setFoliageColorMultiplier(foliagecolormult); if(grasscolormult != -1) b.setGrassColorMultiplier(grasscolormult); if(watercolormult != -1) b.setWaterColorMultiplier(watercolormult); if(tmp != -1.0) b.setTemperature(tmp); if(rain != -1.0) b.setRainfall(rain); } } } else if(line.startsWith("version:")) { line = line.substring(line.indexOf(':')+1); String mcver = core.getDynmapPluginPlatformVersion(); String[] split = line.split("-"); if(split.length == 1) { /* Only one */ if(!mcver.equals(split[0])) { // If not match return; } } else if(split.length == 2) { /* Two : range */ if( (split[0].equals("") || (split[0].compareTo(mcver) <= 0)) && (split[1].equals("") || (split[1].compareTo(mcver) >= 0))) { Log.info("" + split[0] + "<" + mcver + "<" + split[1]); } else { return; } } else { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + line); } } } if(mod_cfg_needed) { Log.severe("Error loading configuration file for " + modname); } Log.verboseinfo("Loaded " + cnt + " texture mappings from " + txtname); } catch (IOException iox) { Log.severe("Error reading " + txtname + " - " + iox.toString()); } catch (NumberFormatException nfx) { Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname + ": " + nfx.getMessage()); } finally { if(rdr != null) { try { rdr.close(); rdr = null; } catch (IOException e) { } } } } /* Process any ore hiding mappings */ public static void handleHideOres() { /* Now, fix mapping if we're hiding any ores */ if(MapManager.mapman.getHideOres()) { for(int i = 0; i < 256; i++) { int id = MapManager.mapman.getBlockIDAlias(i); if(id != i) { /* New mapping? 
*/ HDTextureMap.remapTexture(i, id); } } } } private static final int BLOCKID_GRASS = 2; private static final int BLOCKID_SNOW = 78; /** * Read color for given subblock coordinate, with given block id and data and face */ public final void readColor(final HDPerspectiveState ps, final MapIterator mapiter, final Color rslt, final int blkid, final int lastblocktype, final TexturePackHDShader.ShaderState ss) { int blkdata = ps.getBlockData(); HDTextureMap map = HDTextureMap.getMap(blkid, blkdata, ps.getBlockRenderData()); BlockStep laststep = ps.getLastBlockStep(); int patchid = ps.getTextureIndex(); /* See if patch index */ int textid; int faceindex; if(patchid >= 0) { faceindex = patchid; } else { faceindex = laststep.ordinal(); } textid = map.faces[faceindex]; if (ctm != null) { int mod = 0; if(textid >= COLORMOD_MULT_INTERNAL) { mod = (textid / COLORMOD_MULT_INTERNAL) * COLORMOD_MULT_INTERNAL; textid -= mod; } textid = mod + ctm.mapTexture(mapiter, blkid, blkdata, laststep, textid, ss); } readColor(ps, mapiter, rslt, blkid, lastblocktype, ss, blkdata, map, laststep, patchid, textid, map.stdrotate); if(map.layers != null) { /* If layered */ /* While transparent and more layers */ while(rslt.isTransparent() && (map.layers[faceindex] >= 0)) { faceindex = map.layers[faceindex]; textid = map.faces[faceindex]; readColor(ps, mapiter, rslt, blkid, lastblocktype, ss, blkdata, map, laststep, patchid, textid, map.stdrotate); } } } /** * Read color for given subblock coordinate, with given block id and data and face */ private final void readColor(final HDPerspectiveState ps, final MapIterator mapiter, final Color rslt, final int blkid, final int lastblocktype, final TexturePackHDShader.ShaderState ss, int blkdata, HDTextureMap map, BlockStep laststep, int patchid, int textid, boolean stdrot) { if(textid < 0) { rslt.setTransparent(); return; } int blkindex = indexByIDMeta(blkid, blkdata); boolean hasblockcoloring = ss.do_biome_shading && hasBlockColoring.get(blkindex); // Test if we have no texture modifications boolean simplemap = (textid < COLORMOD_MULT_INTERNAL) && (!hasblockcoloring); if (simplemap) { /* If simple mapping */ int[] texture = terrain_argb[textid]; /* Get texture coordinates (U=horizontal(left=0),V=vertical(top=0)) */ int u = 0, v = 0; /* If not patch, compute U and V */ if(patchid < 0) { int[] xyz = ps.getSubblockCoord(); switch(laststep) { case X_MINUS: /* South face: U = East (Z-), V = Down (Y-) */ u = native_scale-xyz[2]-1; v = native_scale-xyz[1]-1; break; case X_PLUS: /* North face: U = West (Z+), V = Down (Y-) */ u = xyz[2]; v = native_scale-xyz[1]-1; break; case Z_MINUS: /* West face: U = South (X+), V = Down (Y-) */ u = xyz[0]; v = native_scale-xyz[1]-1; break; case Z_PLUS: /* East face: U = North (X-), V = Down (Y-) */ u = native_scale-xyz[0]-1; v = native_scale-xyz[1]-1; break; case Y_MINUS: /* U = East(Z-), V = South(X+) */ if(stdrot) { u = xyz[0]; v = xyz[2]; } else { u = native_scale-xyz[2]-1; v = xyz[0]; } break; case Y_PLUS: if(stdrot) { u = native_scale-xyz[0]-1; v = xyz[2]; } else { u = xyz[2]; v = xyz[0]; } break; } } else { u = fastFloor(ps.getPatchU() * native_scale); v = native_scale - fastFloor(ps.getPatchV() * native_scale) - 1; } /* Read color from texture */ try { rslt.setARGB(texture[v*native_scale + u]); } catch(ArrayIndexOutOfBoundsException aoobx) { u = ((u < 0) ? 0 : ((u >= native_scale) ? (native_scale-1) : u)); v = ((v < 0) ? 0 : ((v >= native_scale) ? 
(native_scale-1) : v)); try { rslt.setARGB(texture[v*native_scale + u]); } catch(ArrayIndexOutOfBoundsException oob2) { } } return; } /* See if not basic block texture */ int textop = textid / COLORMOD_MULT_INTERNAL; textid = textid % COLORMOD_MULT_INTERNAL; /* If clear-inside op, get out early */ if((textop == COLORMOD_CLEARINSIDE) || (textop == COLORMOD_MULTTONED_CLEARINSIDE)) { /* Check if previous block is same block type as we are: surface is transparent if it is */ if(blkid == lastblocktype) { rslt.setTransparent(); return; } /* If water block, use watercolor tone op */ if((blkid == 8) || (blkid == 9)) { textop = water_toned_op; } else if(textop == COLORMOD_MULTTONED_CLEARINSIDE) { textop = COLORMOD_MULTTONED; } } int[] texture = terrain_argb[textid]; /* Get texture coordinates (U=horizontal(left=0),V=vertical(top=0)) */ int u = 0, v = 0, tmp; if(patchid < 0) { int[] xyz = ps.getSubblockCoord(); switch(laststep) { case X_MINUS: /* South face: U = East (Z-), V = Down (Y-) */ u = native_scale-xyz[2]-1; v = native_scale-xyz[1]-1; break; case X_PLUS: /* North face: U = West (Z+), V = Down (Y-) */ u = xyz[2]; v = native_scale-xyz[1]-1; break; case Z_MINUS: /* West face: U = South (X+), V = Down (Y-) */ u = xyz[0]; v = native_scale-xyz[1]-1; break; case Z_PLUS: /* East face: U = North (X-), V = Down (Y-) */ u = native_scale-xyz[0]-1; v = native_scale-xyz[1]-1; break; case Y_MINUS: /* U = East(Z-), V = South(X+) */ if(stdrot) { u = xyz[0]; v = xyz[2]; } else { u = native_scale-xyz[2]-1; v = xyz[0]; } break; case Y_PLUS: if(stdrot) { u = native_scale-xyz[0]-1; v = xyz[2]; } else { u = xyz[2]; v = xyz[0]; } break; } } else { u = fastFloor(ps.getPatchU() * native_scale); v = native_scale - fastFloor(ps.getPatchV() * native_scale) - 1; } /* Handle U-V transforms before fetching color */ switch(textop) { case COLORMOD_ROT90: tmp = u; u = native_scale - v - 1; v = tmp; break; case COLORMOD_ROT180: u = native_scale - u - 1; v = native_scale - v - 1; break; case COLORMOD_ROT270: case COLORMOD_GRASSTONED270: case COLORMOD_FOLIAGETONED270: case COLORMOD_WATERTONED270: tmp = u; u = v; v = native_scale - tmp - 1; break; case COLORMOD_FLIPHORIZ: u = native_scale - u - 1; break; case COLORMOD_SHIFTDOWNHALF: if(v < native_scale/2) { rslt.setTransparent(); return; } v -= native_scale/2; break; case COLORMOD_SHIFTDOWNHALFANDFLIPHORIZ: if(v < native_scale/2) { rslt.setTransparent(); return; } v -= native_scale/2; u = native_scale - u - 1; break; case COLORMOD_INCLINEDTORCH: if(v >= (3*native_scale/4)) { rslt.setTransparent(); return; } v += native_scale/4; if(u < native_scale/2) u = native_scale/2-1; if(u > native_scale/2) u = native_scale/2; break; case COLORMOD_GRASSSIDE: boolean do_grass_side = false; boolean do_snow_side = false; if(ss.do_better_grass) { mapiter.unstepPosition(laststep); if(mapiter.getBlockTypeID() == BLOCKID_SNOW) do_snow_side = true; if(mapiter.getBlockTypeIDAt(BlockStep.Y_MINUS) == BLOCKID_GRASS) do_grass_side = true; mapiter.stepPosition(laststep); } /* Check if snow above block */ if(mapiter.getBlockTypeIDAt(BlockStep.Y_PLUS) == BLOCKID_SNOW) { if(do_snow_side) { texture = terrain_argb[TILEINDEX_SNOW]; /* Snow full side block */ textid = TILEINDEX_SNOW; } else { texture = terrain_argb[TILEINDEX_SNOWSIDE]; /* Snow block */ textid = TILEINDEX_SNOWSIDE; } textop = 0; } else { /* Else, check the grass color overlay */ if(do_grass_side) { texture = terrain_argb[TILEINDEX_GRASS]; /* Grass block */ textid = TILEINDEX_GRASS; textop = COLORMOD_GRASSTONED; /* Force grass toning */ } else { int
ovclr = terrain_argb[TILEINDEX_GRASSMASK][v*native_scale+u]; if((ovclr & 0xFF000000) != 0) { /* Hit? */ texture = terrain_argb[TILEINDEX_GRASSMASK]; /* Use it */ textop = COLORMOD_GRASSTONED; /* Force grass toning */ } } } break; case COLORMOD_LILYTONED: /* Rotate texture based on lily orientation function (from renderBlockLilyPad in RenderBlocks.java in MCP) */ long l1 = (long)(mapiter.getX() * 0x2fc20f) ^ (long)mapiter.getZ() * 0x6ebfff5L ^ (long)mapiter.getY(); l1 = l1 * l1 * 0x285b825L + l1 * 11L; int orientation = (int)(l1 >> 16 & 3L); switch(orientation) { case 0: tmp = u; u = native_scale - v - 1; v = tmp; break; case 1: u = native_scale - u - 1; v = native_scale - v - 1; break; case 2: tmp = u; u = v; v = native_scale - tmp - 1; break; case 3: break; } break; } /* Read color from texture */ try { rslt.setARGB(texture[v*native_scale + u]); } catch (ArrayIndexOutOfBoundsException aioobx) { rslt.setARGB(0); } int clrmult = -1; int clralpha = 0xFF000000; // If block has custom coloring if (hasblockcoloring) { Integer idx = (Integer) this.blockColoring.get(blkindex); LoadedImage img = imgs[idx]; if (img.argb != null) { clrmult = mapiter.getSmoothWaterColorMultiplier(img.argb); } else { hasblockcoloring = false; } } if (!hasblockcoloring) { // Switch based on texture modifier switch(textop) { case COLORMOD_GRASSTONED: case COLORMOD_GRASSTONED270: if(ss.do_biome_shading) { if(imgs[IMG_SWAMPGRASSCOLOR] != null) clrmult = mapiter.getSmoothColorMultiplier(imgs[IMG_GRASSCOLOR].argb, imgs[IMG_SWAMPGRASSCOLOR].argb); else clrmult = mapiter.getSmoothGrassColorMultiplier(imgs[IMG_GRASSCOLOR].argb); } else { clrmult = imgs[IMG_GRASSCOLOR].trivial_color; } break; case COLORMOD_FOLIAGETONED: case COLORMOD_FOLIAGETONED270: if(ss.do_biome_shading) { if(imgs[IMG_SWAMPFOLIAGECOLOR] != null) clrmult = mapiter.getSmoothColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb, imgs[IMG_SWAMPFOLIAGECOLOR].argb); else clrmult = mapiter.getSmoothFoliageColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb); } else { clrmult = imgs[IMG_FOLIAGECOLOR].trivial_color; } break; case COLORMOD_FOLIAGEMULTTONED: if(ss.do_biome_shading) { if(imgs[IMG_SWAMPFOLIAGECOLOR] != null) clrmult = mapiter.getSmoothColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb, imgs[IMG_SWAMPFOLIAGECOLOR].argb); else clrmult = mapiter.getSmoothFoliageColorMultiplier(imgs[IMG_FOLIAGECOLOR].argb); } else { clrmult = imgs[IMG_FOLIAGECOLOR].trivial_color; } if(map.custColorMult != null) { clrmult = ((clrmult & 0xFEFEFE) + map.custColorMult.getColorMultiplier(mapiter)) / 2; } else { clrmult = ((clrmult & 0xFEFEFE) + map.colorMult) / 2; } break; case COLORMOD_WATERTONED: case COLORMOD_WATERTONED270: if(imgs[IMG_WATERCOLORX] != null) { if(ss.do_biome_shading) { clrmult = mapiter.getSmoothWaterColorMultiplier(imgs[IMG_WATERCOLORX].argb); } else { clrmult = imgs[IMG_WATERCOLORX].trivial_color; } } else { if(ss.do_biome_shading) clrmult = mapiter.getSmoothWaterColorMultiplier(); } break; case COLORMOD_BIRCHTONED: clrmult = 0x80a755; /* From ColorizerFoliage.java in MCP */ break; case COLORMOD_PINETONED: clrmult = 0x619961; /* From ColorizerFoliage.java in MCP */ break; case COLORMOD_LILYTONED: clrmult = 0x208030; /* from BlockLilyPad.java in MCP */ break; case COLORMOD_MULTTONED: /* Use color multiplier */ if(map.custColorMult != null) { clrmult = map.custColorMult.getColorMultiplier(mapiter); } else { clrmult = map.colorMult; } if((clrmult & 0xFF000000) != 0) { clralpha = clrmult; } break; } } if((clrmult != -1) && (clrmult != 0)) { rslt.blendColor(clrmult | clralpha); } }
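/* Worked example of the face-value encoding consumed above (illustrative): parseTextureIndex() stores the modifier above COLORMOD_MULT_INTERNAL, so a face value of COLORMOD_ROT90 * COLORMOD_MULT_INTERNAL + n splits into textop = COLORMOD_ROT90 and textid = n; the U-V coordinates are then rotated 90 degrees before the ARGB lookup in terrain_argb[n] */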
private static final void makeAlphaPure(int[] argb) { for(int i = 0; i < argb.length; i++) { if((argb[i] & 0xFF000000) != 0) argb[i] |= 0xFF000000; } } private static final int fastFloor(double f) { return ((int)(f + 1000000000.0)) - 1000000000; } /** * Get tile index, based on tile file name and relative index within tile file * @param fname - filename * @param idx - tile index (= (y * xdim) + x) * @return global tile index, or -1 if not found */ public static int findDynamicTile(String fname, int idx) { DynamicTileFile f; /* Find existing, if already there */ f = addonfilesbyname.get(fname); if (f != null) { if ((idx >= 0) && (idx < f.tile_to_dyntile.length) && (f.tile_to_dyntile[idx] >= 0)) { return f.tile_to_dyntile[idx]; } } return -1; } /** * Add new dynamic tile file definition, or return existing * * @param fname - tile file name * @param xdim - number of tile columns in the file * @param ydim - number of tile rows in the file * @param fmt - tile file format * @param args - format-specific arguments * @return dynamic file index */ public static int findOrAddDynamicTileFile(String fname, int xdim, int ydim, TileFileFormat fmt, String[] args) { DynamicTileFile f; /* Find existing, if already there */ f = addonfilesbyname.get(fname); if (f != null) { return f.idx; } /* Add new tile file entry */ f = new DynamicTileFile(); f.filename = fname; f.tilecnt_x = xdim; f.tilecnt_y = ydim; f.format = fmt; switch(fmt) { case GRID: f.tile_to_dyntile = new int[xdim*ydim]; break; case CHEST: f.tile_to_dyntile = new int[TILEINDEX_CHEST_COUNT]; /* 6 images for chest tile */ break; case BIGCHEST: f.tile_to_dyntile = new int[TILEINDEX_BIGCHEST_COUNT]; /* 10 images for big chest tile */ break; case SIGN: f.tile_to_dyntile = new int[TILEINDEX_SIGN_COUNT]; /* 10 images for sign tile */ break; case CUSTOM: { List<CustomTileRec> recs = new ArrayList<CustomTileRec>(); for(String a : args) { String[] v = a.split("="); if(v.length != 2) continue; if(v[0].startsWith("tile")) { int id = 0; try { id = Integer.parseInt(v[0].substring(4)); } catch (NumberFormatException nfx) { Log.warning("Bad tile ID: " + v[0]); continue; } while(recs.size() <= id) { recs.add(null); } CustomTileRec rec = new CustomTileRec(); try { String[] coords = v[1].split("/"); String[] topleft = coords[0].split(":"); rec.srcx = Integer.parseInt(topleft[0]); rec.srcy = Integer.parseInt(topleft[1]); String[] size = coords[1].split(":"); rec.width = Integer.parseInt(size[0]); rec.height = Integer.parseInt(size[1]); if(coords.length >= 3) { String[] dest = coords[2].split(":"); rec.targetx = Integer.parseInt(dest[0]); rec.targety = Integer.parseInt(dest[1]); } recs.set(id, rec); } catch (Exception x) { Log.warning("Bad custom tile coordinate: " + v[1]); } } } f.tile_to_dyntile = new int[recs.size()]; f.cust = recs; } break; case SKIN: f.tile_to_dyntile = new int[TILEINDEX_SKIN_COUNT]; /* 6 images for skin tile */ break; case TILESET: f.tile_to_dyntile = new int[xdim*ydim]; break; default: f.tile_to_dyntile = new int[xdim*ydim]; break; } Arrays.fill(f.tile_to_dyntile, -1); f.idx = addonfiles.size(); addonfiles.add(f); addonfilesbyname.put(f.filename, f); //Log.info("File " + fname + "(" + f.idx + ")=" + fmt.toString()); return f.idx; } /** * Add or find dynamic tile index of given dynamic tile * @param dynfile_idx - index of file * @param tile_id - ID of tile within file * @return global tile ID */ public static int findOrAddDynamicTile(int dynfile_idx, int tile_id) { DynamicTileFile f = addonfiles.get(dynfile_idx); if(f == null) { Log.warning("Invalid add-on file index: " + dynfile_idx); return 0; } if(f.tile_to_dyntile[tile_id] < 0) { /* Not assigned yet? 
*/ f.tile_to_dyntile[tile_id] = next_dynamic_tile; next_dynamic_tile++; /* Allocate next ID */ } return f.tile_to_dyntile[tile_id]; } private static final int[] smooth_water_mult = new int[10]; public static int getTextureIDAt(MapIterator mapiter, int blkdata, int blkmeta, BlockStep face) { HDTextureMap map = HDTextureMap.getMap(blkdata, blkmeta, blkmeta); int idx = -1; if (map != null) { int sideidx = face.ordinal(); if (map.faces != null) { if (sideidx < map.faces.length) idx = map.faces[sideidx]; else idx = map.faces[0]; } } if(idx > 0) idx = idx % COLORMOD_MULT_INTERNAL; return idx; } private static final String PALETTE_BLOCK_KEY = "palette.block."; private void processCustomColorMap(String fname, String ids) { // Register file name int idx = findOrAddDynamicTileFile(fname, 1, 1, TileFileFormat.BIOME, new String[0]); if(idx < 0) { Log.info("Error registering custom color file: " + fname); return; } Integer index = idx + IMG_CNT; // Now, parse block ID list for (String id : ids.split("\\s+")) { String[] tok = id.split(":"); int meta = -1; int blkid = -1; if (tok.length == 1) { /* Only ID */ try { blkid = Integer.parseInt(tok[0]); } catch (NumberFormatException nfx) { Log.info("Bad custom color block ID: " + tok[0]); } } else if (tok.length == 2) { /* ID : meta */ try { blkid = Integer.parseInt(tok[0]); } catch (NumberFormatException nfx) { Log.info("Bad custom color block ID: " + tok[0]); } try { meta = Integer.parseInt(tok[1]); } catch (NumberFormatException nfx) { Log.info("Bad custom color meta ID: " + tok[1]); } } /* Add mappings for values */ if ((blkid > 0) && (blkid < 4096)) { if ((meta >= 0) && (meta < 16)) { int idm = indexByIDMeta(blkid, meta); this.hasBlockColoring.set(idm); this.blockColoring.put(idm, index); } else if (meta == -1) { /* All meta IDs */ for (meta = 0; meta < 16; meta++) { int idm = indexByIDMeta(blkid, meta); this.hasBlockColoring.set(idm); this.blockColoring.put(idm, index); } } } } } private void processCustomColors(Properties p) { // Loop through keys for(String pname : p.stringPropertyNames()) { if(!pname.startsWith(PALETTE_BLOCK_KEY)) continue; String v = p.getProperty(pname); String fname = pname.substring(PALETTE_BLOCK_KEY.length()).trim(); // Get filename of color map if(fname.charAt(0) == '/') fname = fname.substring(1); // Strip leading / processCustomColorMap(fname, v); } } private static final int indexByIDMeta(int blkid, int meta) { return ((blkid << 4) | meta); } static { /* * Generate smoothed swamp multipliers (indexed by swamp biome count) */ Color c = new Color(); for(int i = 0; i < 10; i++) { /* Use water color multiplier base for 1.1 (E0FFAE) */ int r = (((9-i) * 0xFF) + (i * 0xE0)) / 9; int g = 0xFF; int b = (((9-i) * 0xFF) + (i * 0xAE)) / 9; c.setRGBA(r & 0xFE, g & 0xFE, b & 0xFE, 0xFF); smooth_water_mult[i] = c.getARGB(); } } }
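The static initializer that closes the file blends plain white toward the 1.1 water tint 0xE0FFAE as the swamp-biome count climbs from 0 to 9. A minimal sketch of that per-channel blend, using a hypothetical helper name:

static int blendChannel(int from, int to, int step, int steps) {
    // Linear interpolation matching the smooth_water_mult table (step in 0..steps)
    return ((steps - step) * from + step * to) / steps;
}
// blendChannel(0xFF, 0xE0, 9, 9) == 0xE0 (fully swampy red channel)
// blendChannel(0xFF, 0xAE, 0, 9) == 0xFF (no swamp neighbors, blue channel)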
Remove texture pack version debug
src/main/java/org/dynmap/hdmap/TexturePack.java
Remove texture pack version debug
Java
apache-2.0
a18a5dd0a6b2ffb9f5a579cb1d4cc7d35e430151
0
GlenRSmith/elasticsearch,robin13/elasticsearch
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import java.util.Collections; public class DocumentMapper { private final String type; private final CompressedXContent mappingSource; private final MappingLookup mappingLookup; public DocumentMapper(RootObjectMapper.Builder rootBuilder, MapperService mapperService) { this( mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), mapperService.documentParser(), new Mapping( rootBuilder.build(new ContentPath(1)), mapperService.getMetadataMappers().values().toArray(new MetadataFieldMapper[0]), Collections.emptyMap())); } DocumentMapper(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers, DocumentParser documentParser, Mapping mapping) { this.type = mapping.getRoot().name(); this.mappingLookup = MappingLookup.fromMapping(mapping, documentParser, indexSettings, indexAnalyzers); this.mappingSource = mapping.toCompressedXContent(); } public Mapping mapping() { return mappingLookup.getMapping(); } public String type() { return this.type; } public CompressedXContent mappingSource() { return this.mappingSource; } public <T extends MetadataFieldMapper> T metadataMapper(Class<T> type) { return mapping().getMetadataMapperByClass(type); } public SourceFieldMapper sourceMapper() { return metadataMapper(SourceFieldMapper.class); } public IdFieldMapper idFieldMapper() { return metadataMapper(IdFieldMapper.class); } public RoutingFieldMapper routingFieldMapper() { return metadataMapper(RoutingFieldMapper.class); } public IndexFieldMapper IndexFieldMapper() { return metadataMapper(IndexFieldMapper.class); } public MappingLookup mappers() { return this.mappingLookup; } public ParsedDocument parse(SourceToParse source) throws MapperParsingException { return mappingLookup.parseDocument(source); } public void validate(IndexSettings settings, boolean checkLimits) { this.mapping().validate(this.mappingLookup); if (settings.getIndexMetadata().isRoutingPartitionedIndex()) { if (routingFieldMapper().required() == false) { throw new IllegalArgumentException("mapping type [" + type() + "] must have routing " + "required for partitioned index [" + settings.getIndex().getName() + "]"); } } if (settings.getIndexSortConfig().hasIndexSort() && mappers().hasNested()) { throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); } if (checkLimits) { this.mappingLookup.checkLimits(settings); } } }
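The typed metadataMapper(Class) accessor above is the single lookup path for all metadata mappers; a minimal usage sketch, assuming a DocumentMapper instance named mapper:

SourceFieldMapper source = mapper.sourceMapper();                 // shorthand for metadataMapper(SourceFieldMapper.class)
boolean routingRequired = mapper.routingFieldMapper().required(); // the same check validate() applies to partitioned indices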
server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import java.util.Collections; public class DocumentMapper { private final String type; private final CompressedXContent mappingSource; private final DocumentParser documentParser; private final MappingLookup mappingLookup; public DocumentMapper(RootObjectMapper.Builder rootBuilder, MapperService mapperService) { this( mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), mapperService.documentParser(), new Mapping( rootBuilder.build(new ContentPath(1)), mapperService.getMetadataMappers().values().toArray(new MetadataFieldMapper[0]), Collections.emptyMap())); } DocumentMapper(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers, DocumentParser documentParser, Mapping mapping) { this.type = mapping.getRoot().name(); this.documentParser = documentParser; this.mappingLookup = MappingLookup.fromMapping(mapping, documentParser, indexSettings, indexAnalyzers); this.mappingSource = mapping.toCompressedXContent(); } public Mapping mapping() { return mappingLookup.getMapping(); } public String type() { return this.type; } public CompressedXContent mappingSource() { return this.mappingSource; } public <T extends MetadataFieldMapper> T metadataMapper(Class<T> type) { return mapping().getMetadataMapperByClass(type); } public SourceFieldMapper sourceMapper() { return metadataMapper(SourceFieldMapper.class); } public IdFieldMapper idFieldMapper() { return metadataMapper(IdFieldMapper.class); } public RoutingFieldMapper routingFieldMapper() { return metadataMapper(RoutingFieldMapper.class); } public IndexFieldMapper IndexFieldMapper() { return metadataMapper(IndexFieldMapper.class); } public MappingLookup mappers() { return this.mappingLookup; } public ParsedDocument parse(SourceToParse source) throws MapperParsingException { return documentParser.parseDocument(source, mappingLookup); } public void validate(IndexSettings settings, boolean checkLimits) { this.mapping().validate(this.mappingLookup); if (settings.getIndexMetadata().isRoutingPartitionedIndex()) { if (routingFieldMapper().required() == false) { throw new IllegalArgumentException("mapping type [" + type() + "] must have routing " + "required for partitioned index [" + settings.getIndex().getName() + "]"); } } if (settings.getIndexSortConfig().hasIndexSort() && mappers().hasNested()) { throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); } if (checkLimits) { this.mappingLookup.checkLimits(settings); } } }
DocumentMapper to parse through a MappingLookup (#72554) DocumentMapper currently holds a reference to a DocumentParser, the same one it uses to create its inner MappingLookup, which exposes a parseDocument method. The parse method exposed by DocumentMapper can therefore directly call MappingLookup#parseDocument.
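A minimal sketch of the delegation the message describes; the class shapes and names here are simplified stand-ins, not the actual Elasticsearch types:

class Source {}
class ParsedDoc {}
class MappingLookupSketch {
    // After the change, the lookup owns the parsing entry point
    ParsedDoc parseDocument(Source source) { return new ParsedDoc(); }
}
class DocumentMapperSketch {
    private final MappingLookupSketch lookup = new MappingLookupSketch();
    // parse() no longer needs its own parser reference; it simply forwards
    ParsedDoc parse(Source source) { return lookup.parseDocument(source); }
}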
server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
DocumentMapper to parse through a MappingLookup (#72554)
Java
bsd-3-clause
2117b66de465000d0afa14f203f98a18b43a32bd
0
UoLCompSoc/ld32
package uk.org.ulcompsoc.ld32.util; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.utils.Disposable; public class TextureManager implements Disposable { public final List<Texture> textures = new ArrayList<Texture>(); public final Map<TextureName, Texture> nameMap = new HashMap<TextureName, Texture>(); public final Map<TextureName, TextureRegion[]> animationRegionMap = new HashMap<TextureName, TextureRegion[]>(); public final Map<Character, TextureRegion> mapOfChars = new HashMap<Character, TextureRegion>(); private Texture temp; public TextureManager() { } public void load() { for (final TextureName texName : TextureName.values()) { final FileHandle handle = Gdx.files.internal(texName.assetName); final Texture texture = new Texture(handle); textures.add(texture); nameMap.put(texName, texture); if (texName.isAnimated) { animationRegionMap.put(texName, TextureRegion.split(texture, texName.frameWidth, texName.frameHeight)[0]); } } fillCharTextures(); } private void fillCharTextures() { final TextureRegion[] temp = animationRegionMap.get(TextureName.FONT); for (int i = 0; i < 26; ++i) { /* 26 letters, so 'Z' is included */ mapOfChars.put((char) ('A' + i), temp[i]); } for (int i = 0, j = 26; i < 26; ++i, ++j) { /* lowercase starts at region 26; include 'z' */ mapOfChars.put((char) ('a' + i), temp[j]); } } @Override public void dispose() { animationRegionMap.clear(); nameMap.clear(); for (final Texture t : textures) { t.dispose(); } textures.clear(); } }
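fillCharTextures() above maps 'A'..'Z' onto font-strip regions 0..25 and 'a'..'z' onto 26..51; a hypothetical helper expressing the same index arithmetic:

static int glyphIndex(char c) {
    if (c >= 'A' && c <= 'Z') return c - 'A';      // uppercase occupies indices 0..25
    if (c >= 'a' && c <= 'z') return c - 'a' + 26; // lowercase starts at index 26
    return -1;                                     // character not in the font strip
}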
core/src/uk/org/ulcompsoc/ld32/util/TextureManager.java
package uk.org.ulcompsoc.ld32.util; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.utils.Disposable; public class TextureManager implements Disposable { public final List<Texture> textures = new ArrayList<Texture>(); public final Map<TextureName, Texture> nameMap = new HashMap<TextureName, Texture>(); public final Map<TextureName, TextureRegion[]> animationRegionMap = new HashMap<TextureName, TextureRegion[]>(); public final Map<Character, TextureRegion> mapOfChars = new HashMap<Character, TextureRegion>(); private Texture temp; public TextureManager() { } public void load() { for (final TextureName texName : TextureName.values()) { final FileHandle handle = Gdx.files.internal(texName.assetName); final Texture texture = new Texture(handle); textures.add(texture); nameMap.put(texName, texture); if (texName.isAnimated) { animationRegionMap.put(texName, TextureRegion.split(texture, texName.frameWidth, texName.frameHeight)[0]); } } fillCharTextures(); } private void fillCharTextures() { TextureRegion[] temp = animationRegionMap.get(TextureName.FONT); for(int i = 0; i<25; i++){ mapOfChars.put((char)((int)'A'+i), temp[i]); } for(int i=0,j=26; i<25; i++, j++){ mapOfChars.put((char)((int)'a'+i), temp[j]); } } @Override public void dispose() { animationRegionMap.clear(); nameMap.clear(); for (final Texture t : textures) { t.dispose(); } textures.clear(); } }
Minor changes to font loader
core/src/uk/org/ulcompsoc/ld32/util/TextureManager.java
Minor changes to font loader
Java
bsd-3-clause
e10c762218b4d7ffcae0cddbe26c8715204ba509
0
ra4king/CircuitSim,ra4king/CircuitSimulator
package com.ra4king.circuitsim.gui; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.Deque; import java.util.List; import java.util.Set; import com.ra4king.circuitsim.gui.LinkWires.Wire; import javafx.scene.control.Tab; /** * @author Roi Atalla */ public class EditHistory { public enum EditAction { CREATE_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { manager.getSimulatorWindow().readdCircuit(manager, (Tab)params[0], (int)params[1]); } protected void undo(CircuitManager manager, Object[] params) { manager.getSimulatorWindow().deleteCircuit(manager, true, false); } }, RENAME_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { ((CircuitSim)params[0]).renameCircuit((Tab)params[1], (String)params[3]); } protected void undo(CircuitManager manager, Object[] params) { ((CircuitSim)params[0]).renameCircuit((Tab)params[1], (String)params[2]); } }, MOVE_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") List<Tab> tabs = (List<Tab>)params[0]; Tab tab = (Tab)params[1]; int fromIdx = (int)params[2]; int toIdx = (int)params[3]; if(tabs.indexOf(tab) != fromIdx) { throw new IllegalStateException("Something bad happened!"); } tabs.remove(fromIdx); tabs.add(toIdx, tab); manager.getSimulatorWindow().refreshCircuitsTab(); } protected void undo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") List<Tab> tabs = (List<Tab>)params[0]; Tab tab = (Tab)params[1]; int fromIdx = (int)params[2]; int toIdx = (int)params[3]; // swap to/from idx redo(manager, new Object[] { tabs, tab, toIdx, fromIdx }); } }, DELETE_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { CREATE_CIRCUIT.undo(manager, params); } protected void undo(CircuitManager manager, Object[] params) { CREATE_CIRCUIT.redo(manager, params); } }, ADD_COMPONENT { protected void redo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard().addComponent((ComponentPeer<?>)params[0])); } protected void undo(CircuitManager manager, Object[] params) { ComponentPeer<?> toRemove = (ComponentPeer<?>)params[0]; for(ComponentPeer<?> component : manager.getCircuitBoard().getComponents()) { if(component == toRemove || (component.getClass() == toRemove.getClass() && component.getX() == toRemove.getX() && component.getY() == toRemove.getY() && component.getProperties().equals(toRemove.getProperties()))) { manager.mayThrow(() -> manager.getCircuitBoard() .removeElements(Collections.singleton(component))); break; } } } }, UPDATE_COMPONENT { protected void redo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard().updateComponent((ComponentPeer<?>)params[0], (ComponentPeer<?>)params[1])); } protected void undo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard().updateComponent((ComponentPeer<?>)params[1], (ComponentPeer<?>)params[0])); } }, MOVE_ELEMENTS { protected void redo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") Set<GuiElement> elements = (Set<GuiElement>)params[0]; int dx = (int)params[1]; int dy = (int)params[2]; @SuppressWarnings("unchecked") Set<Wire> wiresToAdd = (Set<Wire>)params[3]; @SuppressWarnings("unchecked") Set<Wire> wiresToRemove = (Set<Wire>)params[4]; manager.mayThrow( () -> manager .getCircuitBoard() .removeElements(wiresToRemove)); manager.mayThrow(() -> manager.getCircuitBoard().initMove(elements)); 
manager.getCircuitBoard().moveElements(dx, dy, false); manager.mayThrow(() -> manager.getCircuitBoard().finalizeMove()); wiresToAdd.forEach(w -> manager.mayThrow( () -> manager .getCircuitBoard() .addWire(w.getX(), w.getY(), w.getLength(), w.isHorizontal()))); } protected void undo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") Set<GuiElement> elements = (Set<GuiElement>)params[0]; int dx = -(int)params[1]; int dy = -(int)params[2]; @SuppressWarnings("unchecked") Set<Wire> wiresToRemove = (Set<Wire>)params[3]; @SuppressWarnings("unchecked") Set<Wire> wiresToAdd = (Set<Wire>)params[4]; manager.mayThrow( () -> manager .getCircuitBoard() .removeElements(wiresToRemove)); manager.mayThrow(() -> manager.getCircuitBoard().initMove(elements)); manager.getCircuitBoard().moveElements(dx, dy, false); manager.mayThrow(() -> manager.getCircuitBoard().finalizeMove()); wiresToAdd.forEach(w -> manager.mayThrow( () -> manager .getCircuitBoard() .addWire(w.getX(), w.getY(), w.getLength(), w.isHorizontal()))); } }, REMOVE_COMPONENT { protected void redo(CircuitManager manager, Object[] params) { ADD_COMPONENT.undo(manager, params); } protected void undo(CircuitManager manager, Object[] params) { ADD_COMPONENT.redo(manager, params); } }, ADD_WIRE { protected void redo(CircuitManager manager, Object[] params) { Wire wire = (Wire)params[0]; manager.mayThrow(() -> manager.getCircuitBoard() .addWire(wire.getX(), wire.getY(), wire.getLength(), wire.isHorizontal())); } protected void undo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard() .removeElements(Collections.singleton((Wire)params[0]))); } }, REMOVE_WIRE { protected void redo(CircuitManager manager, Object[] params) { ADD_WIRE.undo(manager, params); } protected void undo(CircuitManager manager, Object[] params) { ADD_WIRE.redo(manager, params); } }; protected abstract void redo(CircuitManager manager, Object[] params); protected abstract void undo(CircuitManager manager, Object[] params); } private class Edit { EditAction action; CircuitManager circuitManager; Object[] params; Edit(EditAction action, CircuitManager circuitManager, Object[] params) { this.action = action; this.circuitManager = circuitManager; this.params = params; } } public interface EditListener { void edit(EditAction action, CircuitManager manager, Object[] params); } private Deque<List<Edit>> editStack; private Deque<List<Edit>> redoStack; private static final int MAX_HISTORY = 300; private List<EditListener> editListeners; public EditHistory() { editStack = new ArrayDeque<>(); redoStack = new ArrayDeque<>(); editListeners = new ArrayList<>(); } public void clear() { editStack.clear(); redoStack.clear(); } private int disableDepth = 0; public void enable() { disableDepth--; if(disableDepth < 0) { throw new IllegalStateException("This should never happen!"); } } public void disable() { disableDepth++; } public void addListener(EditListener listener) { editListeners.add(listener); } public void removeListener(EditListener listener) { editListeners.remove(listener); } private int groupDepth = 0; private List<List<Edit>> groups; public void beginGroup() { groupDepth++; if(groups == null) { if(groupDepth != 1) throw new IllegalStateException("How the hell did this happen??"); groups = new ArrayList<>(); } groups.add(new ArrayList<>()); } public void endGroup() { if(groupDepth == 0) throw new IllegalStateException("Mismatched call to endGroup."); groupDepth--; if(groupDepth == 0) { if(groups == null) throw new 
IllegalStateException("This can't be null?!"); if(groups.size() != 1) throw new IllegalStateException("There should only be a single group left"); List<Edit> edits = groups.get(0); if(!edits.isEmpty()) { editStack.push(edits); if(editStack.size() > MAX_HISTORY) { editStack.removeLast(); } } groups = null; } else { groups.get(groupDepth - 1).addAll(groups.get(groupDepth)); groups.remove(groupDepth); } } public void clearGroup() { if(groups == null) throw new IllegalStateException("No group started"); groups.get(groupDepth - 1).clear(); groups.subList(groupDepth, groups.size()).clear(); } public void addAction(EditAction action, CircuitManager manager, Object... params) { if(disableDepth == 0) { beginGroup(); groups.get(groupDepth - 1).add(new Edit(action, manager, params)); endGroup(); redoStack.clear(); editListeners.forEach(listener -> listener.edit(action, manager, params)); } } public int editStackSize() { return editStack.size() + (groups == null || groups.isEmpty() ? 0 : 1); } public int redoStackSize() { return redoStack.size(); } public CircuitManager undo() { if(editStack.isEmpty()) { return null; } disable(); List<Edit> popped = editStack.pop(); redoStack.push(popped); for(int i = popped.size() - 1; i >= 0; i--) { Edit edit = popped.get(i); edit.action.undo(edit.circuitManager, edit.params); editListeners.forEach(listener -> listener.edit(edit.action, edit.circuitManager, edit.params)); } enable(); return popped.get(0).circuitManager; } public CircuitManager redo() { if(redoStack.isEmpty()) { return null; } disable(); List<Edit> popped = redoStack.pop(); editStack.push(popped); if(editStack.size() > MAX_HISTORY) { editStack.removeLast(); } for(Edit edit : popped) { edit.action.redo(edit.circuitManager, edit.params); editListeners.forEach(listener -> listener.edit(edit.action, edit.circuitManager, edit.params)); } enable(); return popped.get(0).circuitManager; } }
src/com/ra4king/circuitsim/gui/EditHistory.java
package com.ra4king.circuitsim.gui; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Deque; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import com.ra4king.circuitsim.gui.LinkWires.Wire; import javafx.scene.control.Tab; /** * @author Roi Atalla */ public class EditHistory { public enum EditAction { CREATE_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { manager.getSimulatorWindow().readdCircuit(manager, (Tab)params[0], (int)params[1]); } protected void undo(CircuitManager manager, Object[] params) { manager.getSimulatorWindow().deleteCircuit(manager, true, false); } }, RENAME_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { ((CircuitSim)params[0]).renameCircuit((Tab)params[1], (String)params[3]); } protected void undo(CircuitManager manager, Object[] params) { ((CircuitSim)params[0]).renameCircuit((Tab)params[1], (String)params[2]); } }, MOVE_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") List<Tab> tabs = (List<Tab>)params[0]; Tab tab = (Tab)params[1]; int fromIdx = (int)params[2]; int toIdx = (int)params[3]; if(tabs.indexOf(tab) != fromIdx) { throw new IllegalStateException("Something bad happened!"); } tabs.remove(fromIdx); tabs.add(toIdx, tab); manager.getSimulatorWindow().refreshCircuitsTab(); } protected void undo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") List<Tab> tabs = (List<Tab>)params[0]; Tab tab = (Tab)params[1]; int fromIdx = (int)params[2]; int toIdx = (int)params[3]; // swap to/from idx redo(manager, new Object[] { tabs, tab, toIdx, fromIdx }); } }, DELETE_CIRCUIT { protected void redo(CircuitManager manager, Object[] params) { CREATE_CIRCUIT.undo(manager, params); } protected void undo(CircuitManager manager, Object[] params) { CREATE_CIRCUIT.redo(manager, params); } }, ADD_COMPONENT { protected void redo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard().addComponent((ComponentPeer<?>)params[0])); } protected void undo(CircuitManager manager, Object[] params) { ComponentPeer<?> toRemove = (ComponentPeer<?>)params[0]; for(ComponentPeer<?> component : manager.getCircuitBoard().getComponents()) { if(component == toRemove || (component.getClass() == toRemove.getClass() && component.getX() == toRemove.getX() && component.getY() == toRemove.getY() && component.getProperties().equals(toRemove.getProperties()))) { manager.mayThrow(() -> manager.getCircuitBoard() .removeElements(Collections.singleton(component))); break; } } } }, UPDATE_COMPONENT { protected void redo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard().updateComponent((ComponentPeer<?>)params[0], (ComponentPeer<?>)params[1])); } protected void undo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard().updateComponent((ComponentPeer<?>)params[1], (ComponentPeer<?>)params[0])); } }, MOVE_ELEMENTS { protected void redo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") Set<GuiElement> elements = (Set<GuiElement>)params[0]; int dx = (int)params[1]; int dy = (int)params[2]; @SuppressWarnings("unchecked") Set<Wire> wiresToAdd = (Set<Wire>)params[3]; @SuppressWarnings("unchecked") Set<Wire> wiresToRemove = (Set<Wire>)params[4]; manager.mayThrow( () -> manager .getCircuitBoard() .removeElements(wiresToRemove)); manager.mayThrow(() 
-> manager.getCircuitBoard().initMove(elements)); manager.getCircuitBoard().moveElements(dx, dy, false); manager.mayThrow(() -> manager.getCircuitBoard().finalizeMove()); wiresToAdd.forEach(w -> manager.mayThrow( () -> manager .getCircuitBoard() .addWire(w.getX(), w.getY(), w.getLength(), w.isHorizontal()))); } protected void undo(CircuitManager manager, Object[] params) { @SuppressWarnings("unchecked") Set<GuiElement> elements = (Set<GuiElement>)params[0]; int dx = -(int)params[1]; int dy = -(int)params[2]; @SuppressWarnings("unchecked") Set<Wire> wiresToRemove = (Set<Wire>)params[3]; @SuppressWarnings("unchecked") Set<Wire> wiresToAdd = (Set<Wire>)params[4]; manager.mayThrow( () -> manager .getCircuitBoard() .removeElements(wiresToRemove)); manager.mayThrow(() -> manager.getCircuitBoard().initMove(elements)); manager.getCircuitBoard().moveElements(dx, dy, false); manager.mayThrow(() -> manager.getCircuitBoard().finalizeMove()); wiresToAdd.forEach(w -> manager.mayThrow( () -> manager .getCircuitBoard() .addWire(w.getX(), w.getY(), w.getLength(), w.isHorizontal()))); } }, REMOVE_COMPONENT { protected void redo(CircuitManager manager, Object[] params) { ADD_COMPONENT.undo(manager, params); } protected void undo(CircuitManager manager, Object[] params) { ADD_COMPONENT.redo(manager, params); } }, ADD_WIRE { protected void redo(CircuitManager manager, Object[] params) { Wire wire = (Wire)params[0]; manager.mayThrow(() -> manager.getCircuitBoard() .addWire(wire.getX(), wire.getY(), wire.getLength(), wire.isHorizontal())); } protected void undo(CircuitManager manager, Object[] params) { manager.mayThrow(() -> manager.getCircuitBoard() .removeElements(Collections.singleton((Wire)params[0]))); } }, REMOVE_WIRE { protected void redo(CircuitManager manager, Object[] params) { ADD_WIRE.undo(manager, params); } protected void undo(CircuitManager manager, Object[] params) { ADD_WIRE.redo(manager, params); } }; protected abstract void redo(CircuitManager manager, Object[] params); protected abstract void undo(CircuitManager manager, Object[] params); } private class Edit { EditAction action; CircuitManager circuitManager; Object[] params; Edit(EditAction action, CircuitManager circuitManager, Object[] params) { this.action = action; this.circuitManager = circuitManager; this.params = params; } } public interface EditListener { void edit(EditAction action, CircuitManager manager, Object[] params); } private Deque<List<Edit>> editStack; private Deque<List<Edit>> redoStack; private static final int MAX_HISTORY = 300; private List<EditListener> editListeners; public EditHistory() { editStack = new ArrayDeque<>(); redoStack = new ArrayDeque<>(); editListeners = new ArrayList<>(); } public void clear() { editStack.clear(); redoStack.clear(); } private int disableDepth = 0; public void enable() { disableDepth--; if(disableDepth < 0) { throw new IllegalStateException("This should never happen!"); } } public void disable() { disableDepth++; } public void addListener(EditListener listener) { editListeners.add(listener); } public void removeListener(EditListener listener) { editListeners.remove(listener); } private int groupDepth = 0; private List<List<Edit>> groups; public void beginGroup() { groupDepth++; if(groups == null) { if(groupDepth != 1) throw new IllegalStateException("How the hell did this happen??"); groups = new ArrayList<>(); } groups.add(new ArrayList<>()); } public void endGroup() { if(groupDepth == 0) throw new IllegalStateException("Mismatched call to endGroup."); groupDepth--; if(groupDepth == 
0) { if(groups == null) throw new IllegalStateException("This can't be null?!"); List<Edit> edits = groups.stream().flatMap(Collection::stream).collect(Collectors.toList()); if(!edits.isEmpty()) { editStack.push(edits); if(editStack.size() > MAX_HISTORY) { editStack.removeLast(); } } groups = null; } } public void clearGroup() { if(groups == null) throw new IllegalStateException("No group started"); groups.get(groupDepth - 1).clear(); groups.subList(groupDepth, groups.size()).clear(); } public void addAction(EditAction action, CircuitManager manager, Object... params) { if(disableDepth == 0) { beginGroup(); groups.get(groupDepth - 1).add(new Edit(action, manager, params)); endGroup(); redoStack.clear(); editListeners.forEach(listener -> listener.edit(action, manager, params)); } } public int editStackSize() { return editStack.size() + (groups == null || groups.isEmpty() ? 0 : 1); } public int redoStackSize() { return redoStack.size(); } public CircuitManager undo() { if(editStack.isEmpty()) { return null; } disable(); List<Edit> popped = editStack.pop(); redoStack.push(popped); for(int i = popped.size() - 1; i >= 0; i--) { Edit edit = popped.get(i); edit.action.undo(edit.circuitManager, edit.params); editListeners.forEach(listener -> listener.edit(edit.action, edit.circuitManager, edit.params)); } enable(); return popped.get(0).circuitManager; } public CircuitManager redo() { if(redoStack.isEmpty()) { return null; } disable(); List<Edit> popped = redoStack.pop(); editStack.push(popped); if(editStack.size() > MAX_HISTORY) { editStack.removeLast(); } for(Edit edit : popped) { edit.action.redo(edit.circuitManager, edit.params); editListeners.forEach(listener -> listener.edit(edit.action, edit.circuitManager, edit.params)); } enable(); return popped.get(0).circuitManager; } }
Fix bug with groups in EditHistory.
src/com/ra4king/circuitsim/gui/EditHistory.java
Fix bug with groups in EditHistory.
Java
mit
b127a08956822a978c5872a8848a43d7501be5fc
0
kbase/workspace_deluxe,MrCreosote/workspace_deluxe
package us.kbase.workspace.test.workspace; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.io.StringReader; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import junit.framework.Assert; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.IOUtils; import org.junit.Test; import us.kbase.common.service.JsonTokenStream; import us.kbase.typedobj.core.AbsoluteTypeDefId; import us.kbase.typedobj.core.ObjectPaths; import us.kbase.typedobj.core.TempFileListener; import us.kbase.typedobj.core.TypeDefId; import us.kbase.typedobj.core.TypeDefName; import us.kbase.typedobj.db.FuncDetailedInfo; import us.kbase.typedobj.db.ModuleDefId; import us.kbase.typedobj.db.TypeDetailedInfo; import us.kbase.typedobj.exceptions.NoSuchFuncException; import us.kbase.typedobj.exceptions.NoSuchModuleException; import us.kbase.typedobj.exceptions.NoSuchPrivilegeException; import us.kbase.typedobj.exceptions.NoSuchTypeException; import us.kbase.typedobj.exceptions.TypedObjectExtractionException; import us.kbase.typedobj.exceptions.TypedObjectValidationException; import us.kbase.typedobj.idref.IdReferenceHandlerSetFactory; import us.kbase.typedobj.idref.IdReferenceType; import us.kbase.workspace.database.AllUsers; import us.kbase.workspace.database.ListObjectsParameters; import us.kbase.workspace.database.ModuleInfo; import us.kbase.workspace.database.ObjectChain; import us.kbase.workspace.database.ObjectIDNoWSNoVer; import us.kbase.workspace.database.ObjectIDResolvedWS; import us.kbase.workspace.database.ObjectIdentifier; import us.kbase.workspace.database.ObjectInformation; import us.kbase.workspace.database.Permission; import us.kbase.workspace.database.Provenance; import us.kbase.workspace.database.Provenance.ExternalData; import us.kbase.workspace.database.Reference; import us.kbase.workspace.database.ResourceUsageConfigurationBuilder; import us.kbase.workspace.database.UncheckedUserMetadata; import us.kbase.workspace.database.WorkspaceSaveObject; import us.kbase.workspace.database.Provenance.ProvenanceAction; import us.kbase.workspace.database.ResourceUsageConfigurationBuilder.ResourceUsageConfiguration; import us.kbase.workspace.database.SubObjectIdentifier; import us.kbase.workspace.database.User; import us.kbase.workspace.database.WorkspaceIdentifier; import us.kbase.workspace.database.WorkspaceInformation; import us.kbase.workspace.database.WorkspaceObjectData; import us.kbase.workspace.database.WorkspaceObjectInformation; import us.kbase.workspace.database.WorkspaceUser; import us.kbase.workspace.database.WorkspaceUserMetadata; import us.kbase.workspace.database.WorkspaceUserMetadata.MetadataSizeException; import us.kbase.workspace.database.WorkspaceUserMetadata.MetadataKeySizeException; import us.kbase.workspace.database.WorkspaceUserMetadata.MetadataValueSizeException; import us.kbase.workspace.database.exceptions.InaccessibleObjectException; import us.kbase.workspace.database.exceptions.NoSuchObjectException; import 
us.kbase.workspace.database.exceptions.NoSuchReferenceException; import us.kbase.workspace.database.exceptions.NoSuchWorkspaceException; import us.kbase.workspace.database.exceptions.PreExistingWorkspaceException; import us.kbase.workspace.exceptions.WorkspaceAuthorizationException; import us.kbase.workspace.test.kbase.JSONRPCLayerTester; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; public class WorkspaceTest extends WorkspaceTester { public WorkspaceTest(String config, String backend, Integer maxMemoryUsePerCall) throws Exception { super(config, backend, maxMemoryUsePerCall); } private static final WorkspaceIdentifier lockWS = new WorkspaceIdentifier("lock"); @Test public void workspaceDescription() throws Exception { WorkspaceInformation ltinfo = ws.createWorkspace(SOMEUSER, "lt", false, LONG_TEXT, null); WorkspaceInformation ltpinfo = ws.createWorkspace(SOMEUSER, "ltp", false, LONG_TEXT_PART, null); WorkspaceInformation ltninfo = ws.createWorkspace(SOMEUSER, "ltn", false, null, null); String desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("lt")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT.substring(0, 1000))); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltp")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT_PART)); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltn")); assertNull("Workspace description incorrect", desc); ws.setWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("lt"), LONG_TEXT_PART); ws.setWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltp"), null); ws.setWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltn"), LONG_TEXT); WorkspaceInformation ltinfo2 = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier("lt")); WorkspaceInformation ltpinfo2 = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier("ltp")); WorkspaceInformation ltninfo2 = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier("ltn")); assertTrue("date updated on set ws desc", ltinfo2.getModDate().after(ltinfo.getModDate())); assertTrue("date updated on set ws desc", ltpinfo2.getModDate().after(ltpinfo.getModDate())); assertTrue("date updated on set ws desc", ltninfo2.getModDate().after(ltninfo.getModDate())); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("lt")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT_PART)); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltp")); assertNull("Workspace description incorrect", desc); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltn")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT.substring(0, 1000))); WorkspaceIdentifier wsi = new WorkspaceIdentifier("lt"); failSetWSDesc(AUSER, wsi, "foo", new WorkspaceAuthorizationException( "User a may not set description on workspace lt")); failSetWSDesc(null, wsi, "foo", new WorkspaceAuthorizationException( "Anonymous users may not set description on workspace lt")); ws.setPermissions(SOMEUSER, wsi, Arrays.asList(AUSER), Permission.WRITE); failSetWSDesc(AUSER, wsi, "foo", new WorkspaceAuthorizationException( "User a may not set description on workspace lt")); ws.setPermissions(SOMEUSER, wsi, Arrays.asList(AUSER), Permission.ADMIN); ws.setWorkspaceDescription(AUSER, wsi, "wooga"); assertThat("ws desc ok", ws.getWorkspaceDescription(SOMEUSER, wsi), is("wooga")); ws.setWorkspaceDeleted(SOMEUSER, wsi, true); 
failSetWSDesc(SOMEUSER, wsi, "foo", new NoSuchWorkspaceException( "Workspace lt is deleted", wsi)); ws.setWorkspaceDeleted(SOMEUSER, wsi, false); failSetWSDesc(SOMEUSER, new WorkspaceIdentifier("ltfake"), "foo", new NoSuchWorkspaceException( "No workspace with name ltfake exists", wsi)); try { ws.getWorkspaceDescription(BUSER, wsi); fail("Got ws desc w/o read perms"); } catch (WorkspaceAuthorizationException e) { assertThat("exception message ok", e.getLocalizedMessage(), is("User b may not read workspace lt")); } for (Permission p: Permission.values()) { if (p.compareTo(Permission.NONE) <= 0 || p.compareTo(Permission.OWNER) >= 0) { continue; } ws.setPermissions(SOMEUSER, wsi, Arrays.asList(BUSER), p); ws.getWorkspaceDescription(BUSER, wsi); //will fail if perms are wrong } ws.lockWorkspace(SOMEUSER, wsi); failSetWSDesc(SOMEUSER, wsi, "foo", new WorkspaceAuthorizationException( "The workspace with id " + ltinfo.getId() + ", name lt, is locked and may not be modified")); } @Test public void createWorkspaceAndGetInfo() throws Exception { String wsname = "foo_.-bar"; WorkspaceInformation info = ws.createWorkspace(SOMEUSER, wsname, false, "eeswaffertheen", null); checkWSInfo(info, SOMEUSER, wsname, 0, Permission.OWNER, false, "unlocked", MT_META); long id = info.getId(); WorkspaceIdentifier wsi = new WorkspaceIdentifier(id); Date moddate = info.getModDate(); info = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier(id)); checkWSInfo(info, SOMEUSER, wsname, 0, Permission.OWNER, false, id, moddate, "unlocked", MT_META); info = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier(wsname)); checkWSInfo(info, SOMEUSER, wsname, 0, Permission.OWNER, false, id, moddate, "unlocked", MT_META); Map<String, String> meta = new HashMap<String, String>(); meta.put("foo", "bar"); meta.put("baz", "bash"); WorkspaceInformation info2 = ws.createWorkspace(SOMEUSER, "foo2", true, "eeswaffertheen2", new WorkspaceUserMetadata(meta)); checkWSInfo(info2, SOMEUSER, "foo2", 0, Permission.OWNER, true, "unlocked", meta); checkWSInfo(new WorkspaceIdentifier("foo2"), SOMEUSER, "foo2", 0, Permission.OWNER, true, info2.getId(), info2.getModDate(), "unlocked", meta); try { ws.getWorkspaceInformation(BUSER, wsi); fail("Got metadata w/o read perms"); } catch (WorkspaceAuthorizationException e) { assertThat("exception message ok", e.getLocalizedMessage(), is("User b may not read workspace " + id)); } for (Permission p: Permission.values()) { if (p.compareTo(Permission.NONE) <= 0 || p.compareTo(Permission.OWNER) >= 0) { continue; } ws.setPermissions(SOMEUSER, wsi, Arrays.asList(BUSER), p); ws.getWorkspaceInformation(BUSER, wsi); //will fail if perms are wrong } WorkspaceUser anotheruser = new WorkspaceUser("anotherfnuser"); info = ws.createWorkspace(anotheruser, "anotherfnuser:MrT", true, "Ipitythefoolthatdon'teatMrTbreakfastcereal", null); checkWSInfo(info, anotheruser, "anotherfnuser:MrT", 0, Permission.OWNER, true, "unlocked", MT_META); id = info.getId(); moddate = info.getModDate(); info = ws.getWorkspaceInformation(anotheruser, new WorkspaceIdentifier(id)); checkWSInfo(info, anotheruser, "anotherfnuser:MrT", 0, Permission.OWNER, true, id, moddate, "unlocked", MT_META); info = ws.getWorkspaceInformation(anotheruser, new WorkspaceIdentifier("anotherfnuser:MrT")); checkWSInfo(info, anotheruser, "anotherfnuser:MrT", 0, Permission.OWNER, true, id, moddate, "unlocked", MT_META); //TODO BF these tests should be in the metadata class unit tests /* Map<String, String> bigmeta = new HashMap<String, String>(); for (int 
i = 0; i < 141; i++) { bigmeta.put("thing" + i, TEXT100); } ws.createWorkspace(SOMEUSER, "foo3", false, "eeswaffertheen", new WorkspaceUserMetadata(bigmeta)); bigmeta.put("thing", TEXT100); try { ws.createWorkspace(SOMEUSER, "foo4", false, "eeswaffertheen", bigmeta); fail("created ws with > 16kb metadata"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Metadata size of 16076 is > 16000 bytes")); }*/ ws.setGlobalPermission(anotheruser, new WorkspaceIdentifier("anotherfnuser:MrT"), Permission.NONE); ws.setGlobalPermission(SOMEUSER, new WorkspaceIdentifier("foo2"), Permission.NONE); } @Test public void workspaceMetadata() throws Exception { WorkspaceUser user = new WorkspaceUser("blahblah"); WorkspaceUser user2 = new WorkspaceUser("blahblah2"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("workspaceMetadata"); WorkspaceIdentifier wsiNo = new WorkspaceIdentifier("workspaceNoMetadata"); WorkspaceIdentifier wsiNo2 = new WorkspaceIdentifier("workspaceNoMetadata2"); Map<String, String> meta = new HashMap<String, String>(); meta.put("foo", "bar"); meta.put("foo2", "bar2"); meta.put("some", "meta"); WorkspaceInformation info = ws.createWorkspace(user, wsi.getName(), false, null, new WorkspaceUserMetadata(meta)); ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.ADMIN); checkWSInfo(info, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), info.getModDate(), "unlocked", meta); checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), info.getModDate(), "unlocked", meta); WorkspaceInformation infoNo = ws.createWorkspace(user, wsiNo.getName(), false, null, null); checkWSInfo(infoNo, user, wsiNo.getName(), 0, Permission.OWNER, false, infoNo.getId(), infoNo.getModDate(), "unlocked", MT_META); checkWSInfo(wsiNo, user, wsiNo.getName(), 0, Permission.OWNER, false, infoNo.getId(), infoNo.getModDate(), "unlocked", MT_META); WorkspaceInformation infoNo2 = ws.createWorkspace(user, wsiNo2.getName(), false, null, null); meta.put("foo2", "bar3"); //replace Map<String, String> putmeta = new HashMap<String, String>(); putmeta.put("foo2", "bar3"); ws.setWorkspaceMetadata(user, wsi, new WorkspaceUserMetadata(putmeta)); Date d1 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); meta.put("foo3", "bar4"); //new putmeta.clear(); putmeta.put("foo3", "bar4"); ws.setWorkspaceMetadata(user, wsi, new WorkspaceUserMetadata(putmeta)); Date d2 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); putmeta.clear(); putmeta.put("foo3", "bar5"); //replace putmeta.put("some.garbage", "with.dots"); //new putmeta.put("foo", "whoa this is new"); //replace putmeta.put("no, this part is new", "prunker"); //new meta.put("foo3", "bar5"); meta.put("some.garbage", "with.dots"); meta.put("foo", "whoa this is new"); meta.put("no, this part is new", "prunker"); ws.setWorkspaceMetadata(user, wsi, new WorkspaceUserMetadata(putmeta)); Date d3 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); Map<String, String> newmeta = new HashMap<String, String>(); newmeta.put("new", "meta"); ws.setWorkspaceMetadata(user, wsiNo, new WorkspaceUserMetadata(newmeta)); Date nod1 = checkWSInfo(wsiNo, user, wsiNo.getName(), 0, Permission.OWNER, false, infoNo.getId(), "unlocked", newmeta); assertDatesAscending(infoNo.getModDate(), nod1); meta.remove("foo2"); ws.removeWorkspaceMetadata(user, wsi, "foo2"); Date d4 = 
checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); meta.remove("some"); ws.removeWorkspaceMetadata(user2, wsi, "some"); Date d5 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); ws.removeWorkspaceMetadata(user, wsi, "fake"); //no effect checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), d5, "unlocked", meta); assertDatesAscending(info.getModDate(), d1, d2, d3, d4, d5); checkWSInfo(wsiNo2, user, wsiNo2.getName(), 0, Permission.OWNER, false, infoNo2.getId(), infoNo2.getModDate(), "unlocked", MT_META); ws.removeWorkspaceMetadata(user, wsiNo2, "somekey"); //should do nothing checkWSInfo(wsiNo2, user, wsiNo2.getName(), 0, Permission.OWNER, false, infoNo2.getId(), infoNo2.getModDate(), "unlocked", MT_META); ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.WRITE); failWSMeta(user2, wsi, "foo", "val", new WorkspaceAuthorizationException( "User blahblah2 may not alter metadata for workspace workspaceMetadata")); failWSMeta(null, wsi, "foo", "val", new WorkspaceAuthorizationException( "Anonymous users may not alter metadata for workspace workspaceMetadata")); failWSMeta(user2, new WorkspaceIdentifier("thisiswayfake"), "foo", "val", new NoSuchWorkspaceException( "No workspace with name thisiswayfake exists", wsi)); ws.setWorkspaceDeleted(user, wsi, true); failWSMeta(user, wsi, "foo", "val", new NoSuchWorkspaceException( "Workspace workspaceMetadata is deleted", wsi)); ws.setWorkspaceDeleted(user, wsi, false); putmeta.clear(); for (int i = 0; i < 147; i++) { putmeta.put("" + i, TEXT100); } ws.createWorkspace(user, "wsmetafake", false, null, new WorkspaceUserMetadata(putmeta)); //should work failWSSetMeta(user, wsi, putmeta, new IllegalArgumentException( "Updated metadata exceeds allowed size of 16000B")); ws.setWorkspaceMetadata(user, wsiNo, new WorkspaceUserMetadata(putmeta)); //should work putmeta.put("148", TEXT100); failWSSetMeta(user, wsiNo2, putmeta, new MetadataSizeException( "Metadata exceeds maximum of 16000B")); failWSSetMeta(user, wsi, null, new IllegalArgumentException( "Metadata cannot be null or empty")); failWSSetMeta(user, wsi, MT_META, new IllegalArgumentException( "Metadata cannot be null or empty")); } @Test public void createWorkspaceAndWorkspaceIdentifierWithBadInput() throws Exception { class TestRig { public final WorkspaceUser user; public final String wsname; public final String excep; public TestRig(WorkspaceUser user, String wsname, String exception) { this.user = user; this.wsname = wsname; this.excep = exception; } } WorkspaceUser crap = new WorkspaceUser("afaeaafe"); List<TestRig> userWS = new ArrayList<TestRig>(); //test a few funny chars in the ws name userWS.add(new TestRig(crap, "afe_aff*afea", "Illegal character in workspace name afe_aff*afea: *")); userWS.add(new TestRig(crap, "afe_aff%afea", "Illegal character in workspace name afe_aff%afea: %")); userWS.add(new TestRig(crap, "afeaff/af*ea", "Illegal character in workspace name afeaff/af*ea: /")); userWS.add(new TestRig(crap, "af?eaff*afea", "Illegal character in workspace name af?eaff*afea: ?")); userWS.add(new TestRig(crap, "64", "Workspace names cannot be integers: 64")); //check missing ws name userWS.add(new TestRig(crap, null, "Workspace name cannot be null or the empty string")); userWS.add(new TestRig(crap, "", "Workspace name cannot be null or the empty string")); //check long names userWS.add(new TestRig(crap, TEXT256, "Workspace name exceeds the maximum length of 
255")); //check missing user and/or workspace name in compound name userWS.add(new TestRig(crap, ":", "Workspace name missing from :")); userWS.add(new TestRig(crap, "foo:", "Workspace name missing from foo:")); userWS.add(new TestRig(crap, ":foo", "User name missing from :foo")); //check multiple delims userWS.add(new TestRig(crap, "foo:a:foo", "Workspace name foo:a:foo may only contain one : delimiter")); userWS.add(new TestRig(crap, "foo::foo", "Workspace name foo::foo may only contain one : delimiter")); for (TestRig testdata: userWS) { String wksps = testdata.wsname; try { new WorkspaceIdentifier(wksps); fail(String.format("able to create workspace identifier with illegal input ws %s", wksps)); } catch (IllegalArgumentException e) { assertThat("incorrect exception message", e.getLocalizedMessage(), is(testdata.excep)); } } //check missing user userWS.add(new TestRig(null, "foo", "user cannot be null")); //user must match prefix userWS.add(new TestRig(SOMEUSER, "notauser:foo", "Workspace name notauser:foo must only contain the user name " + SOMEUSER.getUser() + " prior to the : delimiter")); //no ints userWS.add(new TestRig(new WorkspaceUser("foo"), "foo:64", "Workspace names cannot be integers: foo:64")); for (TestRig testdata: userWS) { WorkspaceUser user = testdata.user; String wksps = testdata.wsname; try { ws.createWorkspace(user, wksps, false, "iswaffertheen", null); fail(String.format("able to create workspace with illegal input user: %s ws %s", user, wksps)); } catch (IllegalArgumentException e) { assertThat("incorrect exception message", e.getLocalizedMessage(), is(testdata.excep)); } try { new WorkspaceIdentifier(wksps, user); fail(String.format("able to create workspace identifier with illegal input user: %s ws %s", user, wksps)); } catch (IllegalArgumentException e) { assertThat("incorrect exception message", e.getLocalizedMessage(), is(testdata.excep)); } } } @Test public void preExistingWorkspace() throws Exception { ws.createWorkspace(AUSER, "preexist", false, null, null); failCreateWorkspace(BUSER, "preexist", false, null, null, new PreExistingWorkspaceException("Workspace name preexist is already in use")); ws.setWorkspaceDeleted(AUSER, new WorkspaceIdentifier("preexist"), true); failCreateWorkspace(BUSER, "preexist", false, null, null, new PreExistingWorkspaceException("Workspace name preexist is already in use")); failCreateWorkspace(AUSER, "preexist", false, null, null, new PreExistingWorkspaceException( "Workspace name preexist is already in use by a deleted workspace")); } @Test public void createIllegalUser() throws Exception { try { new WorkspaceUser("*"); fail("able to create user with illegal character"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Illegal character in user name *: *")); } try { new WorkspaceUser(null); fail("able to create user with null"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Username cannot be null or the empty string")); } try { new WorkspaceUser(""); fail("able to create user with empty string"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Username cannot be null or the empty string")); } try { new WorkspaceUser(TEXT101); fail("able to create user with long string"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Username exceeds the maximum length of 100")); } try { new 
AllUsers('$'); fail("able to create AllUser with illegal char"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Disallowed character: $")); } } @Test public void setWorkspaceOwner() throws Exception { WorkspaceUser u1 = new WorkspaceUser("foo"); WorkspaceUser u2 = new WorkspaceUser("bar"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("wsfoo"); ws.createWorkspace(u1, wsi.getName(), false, null, null); Map<String, String> mt = new HashMap<String, String>(); //basic test WorkspaceInformation wsinfo = ws.setWorkspaceOwner(u1, wsi, u2, null, false); checkWSInfo(wsinfo, u2, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); Map<User, Permission> pexp = new HashMap<User, Permission>(); pexp.put(u1, Permission.ADMIN); pexp.put(u2, Permission.OWNER); assertThat("permissions correct", ws.getPermissions( u2, Arrays.asList(wsi)).get(0), is (pexp)); failSetWorkspaceOwner(null, wsi, u2, null, true, new IllegalArgumentException("bar already owns workspace wsfoo")); failSetWorkspaceOwner(u2, wsi, u2, null, false, new IllegalArgumentException("bar already owns workspace wsfoo")); failSetWorkspaceOwner(null, wsi, null, null, true, new NullPointerException("newUser cannot be null")); failSetWorkspaceOwner(u2, wsi, null, null, false, new NullPointerException("newUser cannot be null")); failSetWorkspaceOwner(null, null, u1, null, true, new NullPointerException("wsi cannot be null")); failSetWorkspaceOwner(u2, null, u1, null, false, new NullPointerException("wsi cannot be null")); WorkspaceIdentifier fake = new WorkspaceIdentifier("wsfoofake"); failSetWorkspaceOwner(null, fake, u2, null, true, new NoSuchWorkspaceException("No workspace with name wsfoofake exists", fake)); failSetWorkspaceOwner(u2, fake, u2, null, false, new NoSuchWorkspaceException("No workspace with name wsfoofake exists", fake)); failSetWorkspaceOwner(null, wsi, u1, null, false, new WorkspaceAuthorizationException("Anonymous users may not change the owner of workspace wsfoo")); failSetWorkspaceOwner(u1, wsi, u1, null, false, new WorkspaceAuthorizationException("User foo may not change the owner of workspace wsfoo")); //test as admin wsinfo = ws.setWorkspaceOwner(null, wsi, u1, null, true); checkWSInfo(wsinfo, u1, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); pexp.put(u1, Permission.OWNER); pexp.put(u2, Permission.ADMIN); assertThat("permissions correct", ws.getPermissions( u2, Arrays.asList(wsi)).get(0), is (pexp)); //test basic name change wsinfo = ws.setWorkspaceOwner(u1, wsi, u2, "wsfoonew", false); checkWSInfo(wsinfo, u2, "wsfoonew", 0L, Permission.OWNER, false, "unlocked", mt); wsi = new WorkspaceIdentifier("wsfoonew"); //illegal name change to invalid user failSetWorkspaceOwner(u2, wsi, u1, "bar:wsfoo", false, new IllegalArgumentException("Workspace name bar:wsfoo must only contain the user name foo prior to the : delimiter")); failSetWorkspaceOwner(null, wsi, u1, "bar:wsfoo", true, new IllegalArgumentException("Workspace name bar:wsfoo must only contain the user name foo prior to the : delimiter")); //test auto rename of workspace ws.renameWorkspace(u2, wsi, "bar:wsfoo"); wsi = new WorkspaceIdentifier("bar:wsfoo"); wsinfo = ws.setWorkspaceOwner(u2, wsi, u1, null, false); wsi = new WorkspaceIdentifier("foo:wsfoo"); checkWSInfo(wsinfo, u1, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); //test manual rename of workspace wsinfo = ws.setWorkspaceOwner(u1, wsi, u2, "bar:wsfoo", false); wsi = new WorkspaceIdentifier("bar:wsfoo"); 
checkWSInfo(wsinfo, u2, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); //test rename to preexisting workspace ws.createWorkspace(u1, "foo:wsfoo2", false, null, null); failSetWorkspaceOwner(u2, wsi, u1, "foo:wsfoo2", false, new IllegalArgumentException("There is already a workspace named foo:wsfoo2")); failSetWorkspaceOwner(null, wsi, u1, "foo:wsfoo2", true, new IllegalArgumentException("There is already a workspace named foo:wsfoo2")); //test rename with same name ws.renameWorkspace(u2, wsi, "wsfoo"); wsi = new WorkspaceIdentifier("wsfoo"); wsinfo = ws.setWorkspaceOwner(u2, wsi, u1, "wsfoo", false); checkWSInfo(wsinfo, u1, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); } private void failSetWorkspaceOwner(WorkspaceUser user, WorkspaceIdentifier wsi, WorkspaceUser newuser, String name, boolean asAdmin, Exception expected) throws Exception { try { ws.setWorkspaceOwner(user, wsi, newuser, name, asAdmin); fail("expected set owner to fail"); } catch (Exception got) { assertThat("correct exception", got.getLocalizedMessage(), is(expected.getLocalizedMessage())); assertThat("correct exception type", got, is(expected.getClass())); } } @Test public void permissionsBulk() throws Exception { /* This test was added after the getPermissions method was converted * to take a list of workspaces rather than a single workspace. * Hence it mostly tests the aspects of the method dealing with * multiple workspaces - the prior tests, which exercise the same * method, test the remainder of the functionality. */ WorkspaceIdentifier wiow = new WorkspaceIdentifier("permmass-owner"); WorkspaceIdentifier wiad = new WorkspaceIdentifier("permmass-admin"); WorkspaceIdentifier wiwr = new WorkspaceIdentifier("permmass-write"); WorkspaceIdentifier wird = new WorkspaceIdentifier("permmass-read"); WorkspaceIdentifier wigr = new WorkspaceIdentifier("permmass-globalread"); WorkspaceIdentifier wino = new WorkspaceIdentifier("permmass-none"); ws.createWorkspace(AUSER, wiow.getName(), false, null, null).getId(); ws.createWorkspace(BUSER, wiad.getName(), false, null, null).getId(); ws.createWorkspace(BUSER, wiwr.getName(), false, null, null).getId(); ws.createWorkspace(CUSER, wird.getName(), false, null, null).getId(); ws.createWorkspace(CUSER, wigr.getName(), false, null, null).getId(); ws.createWorkspace(CUSER, wino.getName(), false, null, null).getId(); ws.setPermissions(BUSER, wiad, Arrays.asList(AUSER), Permission.ADMIN); ws.setPermissions(BUSER, wiwr, Arrays.asList(AUSER), Permission.WRITE); ws.setPermissions(CUSER, wird, Arrays.asList(AUSER), Permission.READ); ws.setGlobalPermission(CUSER, wigr, Permission.READ); List<WorkspaceIdentifier> wsis = new LinkedList<WorkspaceIdentifier>( Arrays.asList(wiow, wiad, wiwr, wird, wigr, wino)); Map<User, Permission> e1 = new HashMap<User, Permission>(); e1.put(AUSER, Permission.OWNER); Map<User, Permission> e2 = new HashMap<User, Permission>(); e2.put(AUSER, Permission.ADMIN); e2.put(BUSER, Permission.OWNER); Map<User, Permission> e3 = new HashMap<User, Permission>(); e3.put(AUSER, Permission.WRITE); e3.put(BUSER, Permission.OWNER); Map<User, Permission> e4 = new HashMap<User, Permission>(); e4.put(AUSER, Permission.READ); Map<User, Permission> e5 = new HashMap<User, Permission>(); e5.put(AUSER, Permission.NONE); e5.put(STARUSER, Permission.READ); Map<User, Permission> e6 = new HashMap<User, Permission>(); e6.put(AUSER, Permission.NONE); List<Map<User, Permission>> exp = Arrays.asList(e1, e2, e3, e4, e5, e6); List<Map<User, Permission>> got = 
ws.getPermissions(AUSER, wsis); assertThat("got correct mass permissions", got, is(exp)); ws.setGlobalPermission(CUSER, wigr, Permission.NONE); failGetPermissions(AUSER, null, new NullPointerException( "wslist cannot be null")); List<WorkspaceIdentifier> huge = new LinkedList<WorkspaceIdentifier>(); for (int i = 1; i <= 1002; i++) { huge.add(new WorkspaceIdentifier(i)); } failGetPermissions(AUSER, huge, new IllegalArgumentException( "Maximum number of workspaces allowed for input is 1000")); ws.setWorkspaceDeleted(AUSER, wiow, true); failGetPermissions(AUSER, wsis, new NoSuchWorkspaceException( String.format("Workspace %s is deleted", wiow.getName()), wiow)); ws.setWorkspaceDeleted(AUSER, wiow, false); wsis.add(new WorkspaceIdentifier("permmass-doesntexist")); failGetPermissions(AUSER, wsis, new NoSuchWorkspaceException( "No workspace with name permmass-doesntexist exists", wiow)); wsis.add(new WorkspaceIdentifier(100000000)); failGetPermissions(AUSER, wsis, new NoSuchWorkspaceException( "No workspace with id 100000000 exists", wiow)); } @Test public void permissions() throws Exception { //setup WorkspaceIdentifier wsiNG = new WorkspaceIdentifier("perms_noglobal"); ws.createWorkspace(AUSER, "perms_noglobal", false, null, null); WorkspaceIdentifier wsiGL = new WorkspaceIdentifier("perms_global"); ws.createWorkspace(AUSER, "perms_global", true, "globaldesc", null); Map<User, Permission> expect = new HashMap<User, Permission>(); //try some illegal ops try { ws.getWorkspaceDescription(null, wsiNG); fail("Able to get private workspace description with no user name"); } catch (Exception e) { assertThat("Correct exception message", e.getLocalizedMessage(), is("Anonymous users may not read workspace perms_noglobal")); } try { ws.getWorkspaceInformation(null, wsiNG); fail("Able to get private workspace metadata with no user name"); } catch (WorkspaceAuthorizationException e) { assertThat("Correct exception message", e.getLocalizedMessage(), is("Anonymous users may not read workspace perms_noglobal")); } failSetPermissions(null, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException( "Anonymous users may not set permissions on workspace perms_noglobal")); failSetPermissions(null, wsiNG, null, Permission.READ, new IllegalArgumentException("The users list may not be null or empty")); failSetPermissions(null, wsiNG, new LinkedList<WorkspaceUser>(), Permission.READ, new IllegalArgumentException("The users list may not be null or empty")); failSetPermissions(AUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.OWNER, new IllegalArgumentException("Cannot set owner permission")); failSetPermissions(BUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException("User b may not set permissions on workspace perms_noglobal")); //check basic permissions for new private and public workspaces expect.put(AUSER, Permission.OWNER); assertThat("ws has correct perms for owner", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.put(STARUSER, Permission.READ); assertThat("ws has correct perms for owner", ws.getPermissions( AUSER, Arrays.asList(wsiGL)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.NONE); assertThat("ws has correct perms for random user", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.put(STARUSER, Permission.READ); assertThat("ws has correct perms for random user", ws.getPermissions( BUSER, Arrays.asList(wsiGL)).get(0), is(expect)); //test 
read permissions assertThat("can read public workspace description", ws.getWorkspaceDescription(null, wsiGL), is("globaldesc")); WorkspaceInformation info = ws.getWorkspaceInformation(null, wsiGL); checkWSInfo(info, AUSER, "perms_global", 0, Permission.NONE, true, "unlocked", MT_META); ws.setPermissions(AUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ); expect.clear(); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.READ); expect.put(CUSER, Permission.READ); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("no permission leakage", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); failSetPermissions(BUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException( "User b may not alter other user's permissions on workspace perms_noglobal")); failSetPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.WRITE, new WorkspaceAuthorizationException( "User b may only reduce their permission level on workspace perms_noglobal")); //asAdmin testing ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.ADMIN, true); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.ADMIN); expect.put(CUSER, Permission.READ); assertThat("asAdmin boolean works", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.READ); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("reduce own permissions", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(null, wsiNG, Arrays.asList(BUSER), Permission.ADMIN, true); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.ADMIN); expect.put(CUSER, Permission.READ); assertThat("asAdmin boolean works with null user",ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(AUSER, wsiNG, Arrays.asList(BUSER), Permission.READ); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("reduced permissions", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.READ); //should have no effect expect.clear(); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.READ); expect.put(CUSER, Permission.READ); assertThat("user setting same perms has no effect", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("setting own perms to same has no effect", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.NONE); expect.clear(); expect.put(AUSER, Permission.OWNER); expect.put(CUSER, Permission.READ); assertThat("user removed own perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.NONE); assertThat("can remove own perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); //test write permissions ws.setPermissions(AUSER, wsiNG, Arrays.asList(BUSER), Permission.WRITE); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.WRITE); expect.put(CUSER, Permission.READ); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("write perms allow viewing all perms", 
ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(CUSER, Permission.READ); assertThat("no permission leakage", ws.getPermissions( CUSER, Arrays.asList(wsiNG)).get(0), is(expect)); failSetPermissions(BUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException( "User b may not alter other user's permissions on workspace perms_noglobal")); //test admin permissions ws.setPermissions(AUSER, wsiNG, Arrays.asList(BUSER), Permission.ADMIN); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.ADMIN); expect.put(CUSER, Permission.READ); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("admin can see all perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(AUSER, CUSER), Permission.WRITE); expect.put(CUSER, Permission.WRITE); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("admin can correctly set perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); //test remove permissions ws.setPermissions(BUSER, wsiNG, Arrays.asList(AUSER, CUSER), Permission.NONE); expect.remove(CUSER); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("admin can't overwrite owner perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setGlobalPermission(AUSER, new WorkspaceIdentifier("perms_global"), Permission.NONE); } @Test public void permissionsWithNoUser() throws Exception { /* Tests the case that no user credentials are supplied and thus the * user is null. Only globally readable workspaces should return * permissions other than NONE. 
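* A private workspace should therefore report an empty permission map, while a public one reports only the global (*) user's READ.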
*/ WorkspaceIdentifier wsiNG = new WorkspaceIdentifier("PnoU_noglobal"); ws.createWorkspace(AUSER, "PnoU_noglobal", false, null, null); WorkspaceIdentifier wsiGL = new WorkspaceIdentifier("PnoU_global"); ws.createWorkspace(AUSER, "PnoU_global", true, "globaldesc", null); Map<User, Permission> expect = new HashMap<User, Permission>(); assertThat("No permissions for private WS", ws.getPermissions(null, Arrays.asList(wsiNG)).get(0), is(expect)); expect.put(STARUSER, Permission.READ); assertThat("Read permissions for public WS", ws.getPermissions(null, Arrays.asList(wsiGL)).get(0), is(expect)); ws.setGlobalPermission(AUSER, wsiGL, Permission.NONE); } @Test public void saveObjectsAndGetMetaSimple() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceUser bar = new WorkspaceUser("bar"); IdReferenceHandlerSetFactory foofac = getIdFactory(); IdReferenceHandlerSetFactory barfac = getIdFactory(); WorkspaceIdentifier read = new WorkspaceIdentifier("saveobjread"); WorkspaceIdentifier priv = new WorkspaceIdentifier("saveobj"); WorkspaceInformation readinfo = ws.createWorkspace( foo, read.getIdentifierString(), true, null, null); WorkspaceInformation privinfo = ws.createWorkspace( foo, priv.getIdentifierString(), false, null, null); Date readLastDate = readinfo.getModDate(); Date privLastDate = privinfo.getModDate(); long readid = readinfo.getId(); long privid = privinfo.getId(); Map<String, Object> data = new HashMap<String, Object>(); Map<String, Object> data2 = new HashMap<String, Object>(); Map<String, String> premeta = new HashMap<String, String>(); Map<String, Object> moredata = new HashMap<String, Object>(); moredata.put("foo", "bar"); data.put("fubar", moredata); JsonNode savedata = MAPPER.valueToTree(data); data2.put("fubar2", moredata); JsonNode savedata2 = MAPPER.valueToTree(data2); premeta.put("metastuff", "meta"); WorkspaceUserMetadata meta = new WorkspaceUserMetadata(premeta); Map<String, String> premeta2 = new HashMap<String, String>(); premeta2.put("meta2", "my hovercraft is full of eels"); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(premeta2); Provenance p = new Provenance(new WorkspaceUser("kbasetest2")); p.addAction(new Provenance.ProvenanceAction().withServiceName("some service")); List<WorkspaceSaveObject> objects = new ArrayList<WorkspaceSaveObject>(); try { ws.saveObjects(foo, read, objects, foofac); fail("Saved no objects"); } catch (IllegalArgumentException e) { assertThat("correct except", e.getLocalizedMessage(), is("No data provided")); } failGetObjects(foo, new ArrayList<ObjectIdentifier>(), new IllegalArgumentException( "No object identifiers provided")); try { ws.getObjectInformation(foo, new ArrayList<ObjectIdentifier>(), true, false); fail("called method with no identifiers"); } catch (IllegalArgumentException e) { assertThat("correct except", e.getLocalizedMessage(), is("No object identifiers provided")); } objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto3"), savedata, SAFE_TYPE1, meta, p, false)); objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto3"), savedata2, SAFE_TYPE1, meta2, p, false)); objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto3-1"), savedata, SAFE_TYPE1, meta, p, false)); objects.add(new WorkspaceSaveObject(savedata2, SAFE_TYPE1, meta2, p, false)); objects.add(new WorkspaceSaveObject(savedata, SAFE_TYPE1, meta, p, false)); readLastDate = ws.getWorkspaceInformation(foo, read).getModDate(); List<ObjectInformation> objinfo = ws.saveObjects(foo, read, objects, foofac); readLastDate = 
assertWorkspaceDateUpdated(foo, read, readLastDate, "ws date modified on save"); String chksum1 = "36c4f68f2c98971b9736839232eb08f4"; String chksum2 = "3c59f762140806c36ab48a152f28e840"; checkObjInfo(objinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); checkObjInfo(objinfo.get(1), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo.get(2), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); checkObjInfo(objinfo.get(3), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo.get(4), 4, "auto4", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); List<ObjectIdentifier> loi = new ArrayList<ObjectIdentifier>(); loi.add(new ObjectIdentifier(read, 1)); loi.add(new ObjectIdentifier(read, 1, 1)); loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3")); loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3", 1)); loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1)); loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1, 1)); loi.add(new ObjectIdentifier(read, "auto3")); loi.add(new ObjectIdentifier(read, "auto3", 1)); loi.add(new ObjectIdentifier(read, "auto3-2")); loi.add(new ObjectIdentifier(read, 3)); loi.add(new ObjectIdentifier(read, "auto3-2", 1)); loi.add(new ObjectIdentifier(read, 3, 1)); List<ObjectInformation> objinfo2 = ws.getObjectInformation(foo, loi, true, false); List<ObjectInformation> objinfo2NoMeta = ws.getObjectInformation(foo, loi, false, false); checkObjInfo(objinfo2.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(1), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); checkObjInfo(objinfo2.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(3), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); checkObjInfo(objinfo2.get(4), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(5), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); checkObjInfo(objinfo2.get(6), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(7), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta); checkObjInfo(objinfo2.get(8), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(9), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(10), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2.get(11), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2); checkObjInfo(objinfo2NoMeta.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null); checkObjInfo(objinfo2NoMeta.get(1), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null); checkObjInfo(objinfo2NoMeta.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, 
readid, read.getName(), chksum2, 24, null);
checkObjInfo(objinfo2NoMeta.get(3), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
checkObjInfo(objinfo2NoMeta.get(4), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
checkObjInfo(objinfo2NoMeta.get(5), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
checkObjInfo(objinfo2NoMeta.get(6), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
checkObjInfo(objinfo2NoMeta.get(7), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
checkObjInfo(objinfo2NoMeta.get(8), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
checkObjInfo(objinfo2NoMeta.get(9), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
checkObjInfo(objinfo2NoMeta.get(10), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
checkObjInfo(objinfo2NoMeta.get(11), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
List<FakeObjectInfo> retinfo = new ArrayList<FakeObjectInfo>();
FakeResolvedWSID fakews = new FakeResolvedWSID(read.getName(), readid);
UncheckedUserMetadata umeta = new UncheckedUserMetadata(meta);
UncheckedUserMetadata umeta2 = new UncheckedUserMetadata(meta2);
retinfo.add(new FakeObjectInfo(1L, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24L, umeta2));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24, umeta2));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24, umeta2));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24, umeta2));
retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
List<Map<String, Object>> retdata = Arrays.asList(
		data2, data, data2, data, data2, data, data2, data, data2, data2, data2, data2);
checkObjectAndInfo(foo, loi, retinfo, retdata);
privLastDate = ws.getWorkspaceInformation(foo, priv).getModDate();
ws.saveObjects(foo, priv, objects, foofac);
privLastDate = assertWorkspaceDateUpdated(foo, priv, privLastDate, "ws date modified on save");
objects.clear();
objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer(2), savedata, SAFE_TYPE1, meta2, p, false));
objinfo = ws.saveObjects(foo, read, objects, foofac);
ws.saveObjects(foo, priv, objects, foofac);
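// re-saving to the existing numeric id 2 should bump auto3-1 to version 2 rather than create a new object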
checkObjInfo(objinfo.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum1, 23, premeta2); objinfo2 = ws.getObjectInformation(foo, Arrays.asList(new ObjectIdentifier(read, 2)), true, false); checkObjInfo(objinfo2.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum1, 23, premeta2); ws.getObjectInformation(bar, Arrays.asList(new ObjectIdentifier(read, 2)), true, false); //should work try { ws.getObjectInformation(bar, Arrays.asList(new ObjectIdentifier(priv, 2)), true, false); fail("Able to get obj meta from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 2 cannot be accessed: User bar may not read workspace saveobj")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(priv, 2))); } successGetObjects(bar, Arrays.asList(new ObjectIdentifier(read, 2))); try { ws.getObjects(bar, Arrays.asList(new ObjectIdentifier(priv, 2))); fail("Able to get obj data from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 2 cannot be accessed: User bar may not read workspace saveobj")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(priv, 2))); } ws.setPermissions(foo, priv, Arrays.asList(bar), Permission.READ); objinfo2 = ws.getObjectInformation(bar, Arrays.asList(new ObjectIdentifier(priv, 2)), true, false); checkObjInfo(objinfo2.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 2, foo, privid, priv.getName(), chksum1, 23, premeta2); checkObjectAndInfo(bar, Arrays.asList(new ObjectIdentifier(priv, 2)), Arrays.asList(new FakeObjectInfo(2L, "auto3-1", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, new FakeResolvedWSID(priv.getName(), privid), chksum1, 23L, umeta2)), Arrays.asList(data)); failSave(bar, priv, objects, new WorkspaceAuthorizationException("User bar may not write to workspace saveobj")); ws.setPermissions(foo, priv, Arrays.asList(bar), Permission.WRITE); objinfo = ws.saveObjects(bar, priv, objects, barfac); checkObjInfo(objinfo.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 3, bar, privid, priv.getName(), chksum1, 23, premeta2); failGetObjects(foo, Arrays.asList(new ObjectIdentifier(read, "booger")), new NoSuchObjectException("No object with name booger exists in workspace " + readid)); failGetObjects(foo, Arrays.asList(new ObjectIdentifier(new WorkspaceIdentifier("saveAndGetFakefake"), "booger")), new InaccessibleObjectException("Object booger cannot be accessed: No workspace with name saveAndGetFakefake exists")); ws.setPermissions(foo, priv, Arrays.asList(bar), Permission.NONE); failGetObjects(bar, Arrays.asList(new ObjectIdentifier(priv, 3)), new InaccessibleObjectException("Object 3 cannot be accessed: User bar may not read workspace saveobj")); failGetObjects(null, Arrays.asList(new ObjectIdentifier(priv, 3)), new InaccessibleObjectException("Object 3 cannot be accessed: Anonymous users may not read workspace saveobj")); //test get object info where null is returned instead of exception List<ObjectIdentifier> nullloi = new ArrayList<ObjectIdentifier>(); nullloi.add(new ObjectIdentifier(read, 1)); nullloi.add(new ObjectIdentifier(read, "booger")); nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier("saveAndGetFakefake"), "booger")); nullloi.add(new ObjectIdentifier(read, 1, 1)); List<ObjectInformation> nullobjinfo = ws.getObjectInformation(foo, 
nullloi, true, true);
checkObjInfo(nullobjinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(2));
checkObjInfo(nullobjinfo.get(3), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
nullloi.clear();
nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3"));
nullloi.add(new ObjectIdentifier(priv, 2));
nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3", 1));
nullloi.add(new ObjectIdentifier(priv, 3));
nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1));
nullobjinfo = ws.getObjectInformation(bar, nullloi, false, true);
checkObjInfo(nullobjinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
checkObjInfo(nullobjinfo.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(3));
checkObjInfo(nullobjinfo.get(4), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
nullloi.clear();
nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1, 1));
nullloi.add(new ObjectIdentifier(priv, 3));
nullloi.add(new ObjectIdentifier(read, "auto3"));
nullobjinfo = ws.getObjectInformation(null, nullloi, true, true);
checkObjInfo(nullobjinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
checkObjInfo(nullobjinfo.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
ws.setObjectsDeleted(foo, Arrays.asList(new ObjectIdentifier(priv, 3)), true);
ws.setWorkspaceDeleted(foo, read, true);
nullobjinfo = ws.getObjectInformation(null, nullloi, true, true);
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(0));
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
assertNull("Obj info is null for inaccessible object", nullobjinfo.get(2));
ws.setWorkspaceDeleted(foo, read, false);
ws.setGlobalPermission(foo, read, Permission.NONE);
}

@Test
public void metadataExtracted() throws Exception {
String module = "TestMetaData";
String spec = "module " + module + " {" +
		"/* @metadata ws val \n@metadata ws length(l) as Length of list*/"+
		"typedef structure { string val; list<int> l; } MyType;" +
		"};";
WorkspaceUser userfoo = new WorkspaceUser("foo");
ws.requestModuleRegistration(userfoo, module);
ws.resolveModuleRegistration(module, true);
ws.compileNewTypeSpec(userfoo, spec, Arrays.asList("MyType"), null, null, false, null);
TypeDefId MyType = new TypeDefId(new TypeDefName(module, "MyType"), 0, 1);
WorkspaceIdentifier wspace = new WorkspaceIdentifier("metadatatest");
ws.createWorkspace(userfoo, wspace.getName(), false, null, null);
Provenance emptyprov = new Provenance(userfoo);
// save an object and get back object info
Map<String, Object> d1 = new LinkedHashMap<String, Object>();
String val = "i should be a metadata";
d1.put("val", val);
d1.put("l", Arrays.asList(1,2,3,4,5,6,7,8));
Map<String, String> metadata = new HashMap<String, String>();
ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(new
ObjectIDNoWSNoVer("d1"),d1, MyType, new WorkspaceUserMetadata(metadata), emptyprov, false)), getIdFactory()); List <ObjectInformation> oi = ws.getObjectInformation(userfoo, Arrays.asList(new ObjectIdentifier(wspace, "d1")), true, true); Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi); Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi.get(0)); // check that automatic metadata fields were populated correctly, and nothing else was added Map<String,String> savedUserMetaData = new HashMap<String, String>( oi.get(0).getUserMetaData().getMetadata()); for(Entry<String,String> m : savedUserMetaData.entrySet()) { if(m.getKey().equals("val")) Assert.assertTrue("Extracted metadata must be correct",m.getValue().equals(val)); if(m.getKey().equals("Length of list")) Assert.assertTrue("Extracted metadata must be correct",m.getValue().equals("8")); } savedUserMetaData.remove("val"); savedUserMetaData.remove("Length of list"); Assert.assertEquals("Only metadata we wanted was extracted", 0, savedUserMetaData.size()); // now we do the same thing, but make sure 1) metadata set was added, and 2) metadata is overridden // by the extracted metadata metadata.put("Length of list","i am pretty sure it was 7"); metadata.put("my_special_metadata", "yes"); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("d2"),d1, MyType, new WorkspaceUserMetadata(metadata), emptyprov, false)), getIdFactory()); List <ObjectInformation> oi2 = ws.getObjectInformation(userfoo, Arrays.asList(new ObjectIdentifier(wspace, "d2")), true, true); Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi2); Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi2.get(0)); savedUserMetaData = new HashMap<String, String>( oi2.get(0).getUserMetaData().getMetadata()); for(Entry<String,String> m : savedUserMetaData.entrySet()) { if(m.getKey().equals("val")) assertThat("Extracted metadata must be correct", m.getValue(), is(val)); if(m.getKey().equals("Length of list")) assertThat("Extracted metadata must be correct", m.getValue(), is("8")); if(m.getKey().equals("my_special_metadata")) assertThat("Extracted metadata must be correct", m.getValue(), is("yes")); } savedUserMetaData.remove("val"); savedUserMetaData.remove("Length of list"); savedUserMetaData.remove("my_special_metadata"); Assert.assertEquals("Only metadata we wanted was extracted", 0, savedUserMetaData.size()); } @Test public void metadataExtractedLargeTest() throws Exception { String module = "TestLargeMetadata"; String typeName = "BigMeta"; String spec = "module " + module + " {" + "/* @metadata ws val\n" + "@metadata ws length(l) as Length of list*/" + "typedef structure {" + "string val;" + "list<int> l;" + "} " + typeName + ";" + "};"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, module); ws.resolveModuleRegistration(module, true); ws.compileNewTypeSpec(user, spec, Arrays.asList(typeName), null, null, false, null); TypeDefId type = new TypeDefId( new TypeDefName(module, typeName), 0, 1); Provenance mtprov = new Provenance(user); WorkspaceIdentifier wsi = new WorkspaceIdentifier( "metadataExtractedLargeTest"); ws.createWorkspace(user, wsi.getName(), false, null, null); Map<String, Object> dBig = new LinkedHashMap<String, Object>(); dBig.put("l", Arrays.asList(1,2,3,4,5,6,7,8)); //test fail on large extracted values 
dBig.put("val", TEXT1000); saveObject(user, wsi, null, dBig, type, "foo", mtprov); //should work dBig.put("val", TEXT1000 + "f"); failSave(user, wsi, "bar", dBig, type, mtprov, new IllegalArgumentException( "Object #1, bar: Value for metadata key val exceeds maximum of 1000B: " + TEXT1000 + "f")); StringBuilder unicode = new StringBuilder(); for (int i = 0; i < 250; i++) { unicode.appendCodePoint(0x1D120); } dBig.put("val", unicode.toString()); saveObject(user, wsi, null, dBig, type, "foo", mtprov); //should work dBig.put("val", unicode.toString() + "f"); failSave(user, wsi, "bar", dBig, type, mtprov, new IllegalArgumentException( "Object #1, bar: Value for metadata key val exceeds maximum of 1000B: " + unicode.toString() + "f")); // test fail when extracted metadata > limit StringBuilder bigVal = new StringBuilder(); for (int i = 0; i < 18; i++) { bigVal.append(LONG_TEXT); //> 16kb now } dBig.put("val", bigVal.toString()); failSave(user, wsi, "bigextractedmeta", dBig, type, mtprov, new IllegalArgumentException( "Object #1, bigextractedmeta: Extracted metadata from object exceeds limit of 16000B")); } @Test public void encodings() throws Exception { WorkspaceUser user = new WorkspaceUser("encodings"); WorkspaceIdentifier wspace = new WorkspaceIdentifier("encodings"); ws.createWorkspace(user, wspace.getName(), false, null, null); Provenance emptyprov = new Provenance(user); StringBuffer sb = new StringBuffer(); sb.appendCodePoint(0x1F082); sb.append("a"); sb.appendCodePoint(0x1F0C6); sb.append("b"); sb.appendCodePoint(0x23824); sb.append("c"); sb.appendCodePoint(0x1685); sb.append("d"); sb.appendCodePoint(0x13B2); sb.append("e"); sb.appendCodePoint(0x06E9); String s = sb.toString() + sb.toString(); Map<String, Object> craycraymap = new HashMap<String, Object>(); craycraymap.put(s + "42", Arrays.asList(s, s + "woot", s)); craycraymap.put(s + "6", s); craycraymap.put(s + "3012", 1); String jsondata = MAPPER.writeValueAsString(craycraymap); List<Charset> csets = Arrays.asList(Charset.forName("UTF-8"), Charset.forName("UTF-16LE"), Charset.forName("UTF-16BE"), Charset.forName("UTF-32LE"), Charset.forName("UTF-32BE")); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); for (Charset cs: csets) { objs.add(new WorkspaceSaveObject(new JsonTokenStream(jsondata.getBytes(cs)), SAFE_TYPE1, null, emptyprov, false)); } ws.saveObjects(user, wspace, objs, getIdFactory()); List<WorkspaceObjectData> ret = ws.getObjects(user, Arrays.asList( new ObjectIdentifier(wspace, 1), new ObjectIdentifier(wspace, 2), new ObjectIdentifier(wspace, 3), new ObjectIdentifier(wspace, 4), new ObjectIdentifier(wspace, 5))); for (WorkspaceObjectData wod: ret) { assertThat("got correct object input in various encodings", wod.getData(), is((Object) craycraymap)); } } @Test public void saveNonStructuralObjects() throws Exception { String module = "TestNonStruct"; String spec = "module " + module + " {" + "typedef string type1;" + "typedef list<string> type2;" + "typedef mapping<string, string> type3;" + "typedef tuple<string, string> type4;" + "typedef structure { string val; } type5;" + "};"; WorkspaceUser userfoo = new WorkspaceUser("foo"); ws.requestModuleRegistration(userfoo, module); ws.resolveModuleRegistration(module, true); ws.compileNewTypeSpec(userfoo, spec, Arrays.asList( "type1", "type2", "type3", "type4", "type5"), null, null, false, null); TypeDefId abstype1 = new TypeDefId(new TypeDefName(module, "type1"), 0, 1); TypeDefId abstype2 = new TypeDefId(new TypeDefName(module, "type2"), 0, 1); TypeDefId 
abstype3 = new TypeDefId(new TypeDefName(module, "type3"), 0, 1); TypeDefId abstype4 = new TypeDefId(new TypeDefName(module, "type4"), 0, 1); TypeDefId abstype5 = new TypeDefId(new TypeDefName(module, "type5"), 0, 1); WorkspaceIdentifier wspace = new WorkspaceIdentifier("nonstruct"); ws.createWorkspace(userfoo, wspace.getName(), false, null, null); Provenance emptyprov = new Provenance(userfoo); Map<String, String> data3 = new HashMap<String, String>(); data3.put("val", "2"); try { ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject("data1", abstype1, null, emptyprov, false)), getIdFactory()); Assert.fail("Method works but shouldn't"); } catch (TypedObjectValidationException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure")); } try { ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(Arrays.asList("data2"), abstype2, null, emptyprov, false)), getIdFactory()); Assert.fail("Method works but shouldn't"); } catch (TypedObjectValidationException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure")); } try { ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)), getIdFactory()); Assert.fail("Method works but shouldn't"); } catch (TypedObjectValidationException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure")); } try { ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(Arrays.asList("data4", "data4"), abstype4, null, emptyprov, false)), getIdFactory()); Assert.fail("Method works but shouldn't"); } catch (TypedObjectValidationException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure")); } ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data3, abstype5, null, emptyprov, false)), getIdFactory()); } @SuppressWarnings("unchecked") @Test public void saveNulls() throws Exception { String module = "TestNull"; String spec = "module " + module + " {" + "typedef structure { " + " string val1; " + " int val2; " + " float val3; " + "} type1; " + "typedef structure { " + " list<string> val; " + "} type2;" + "typedef structure { " + " mapping<string,string> val; " + "} type3;" + "typedef structure { " + " tuple<string,string> val; " + "} type4;" + "typedef structure { " + " list<int> val; " + "} type5;" + "typedef structure { " + " list<float> val; " + "} type6;" + "};"; WorkspaceUser userfoo = new WorkspaceUser("foo"); ws.requestModuleRegistration(userfoo, module); ws.resolveModuleRegistration(module, true); ws.compileNewTypeSpec(userfoo, spec, Arrays.asList( "type1", "type2", "type3", "type4", "type5", "type6"), null, null, false, null); WorkspaceIdentifier wspace = new WorkspaceIdentifier("nulls"); ws.createWorkspace(userfoo, wspace.getName(), false, null, null); Provenance emptyprov = new Provenance(userfoo); TypeDefId abstype1 = new TypeDefId(new TypeDefName(module, "type1"), 0, 1); TypeDefId abstype2 = new TypeDefId(new TypeDefName(module, "type2"), 0, 1); TypeDefId abstype3 = new TypeDefId(new TypeDefName(module, "type3"), 0, 1); TypeDefId abstype4 = new TypeDefId(new TypeDefName(module, "type4"), 0, 1); TypeDefId abstype5 = new TypeDefId(new TypeDefName(module, "type5"), 0, 1); TypeDefId abstype6 = new TypeDefId(new TypeDefName(module, "type6"), 0, 1); Set<String> keys = new TreeSet<String>(Arrays.asList("val1", "val2", "val3")); //TODO should try these tests with bytes vs. 
maps Map<String, Object> data1 = new LinkedHashMap<String, Object>(); data1.put("val3", null); data1.put("val2", null); data1.put("val1", null); Assert.assertEquals(keys, new TreeSet<String>(data1.keySet())); Assert.assertTrue(data1.containsKey("val1")); Assert.assertNull(data1.get("val1")); long data1id = ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)), getIdFactory()).get(0).getObjectId(); Map<String, Object> data1copy = (Map<String, Object>)ws.getObjects(userfoo, Arrays.asList( new ObjectIdentifier(wspace, data1id))).get(0).getData(); Assert.assertEquals(keys, new TreeSet<String>(data1copy.keySet())); Map<String, Object> data2 = new LinkedHashMap<String, Object>(); data2.put("val", null); failSave(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data2, abstype2, null, emptyprov, false)), new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (null) does not match any allowed primitive type (allowed: [\"array\"]), at /val")); data2.put("val", Arrays.asList((String)null)); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data2, abstype2, null, emptyprov, false)), getIdFactory()); Map<String, Object> data3 = new LinkedHashMap<String, Object>(); data3.put("val", null); failSave(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)), new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (null) does not match any allowed primitive type (allowed: [\"object\"]), at /val")); Map<String, Object> innerMap = new LinkedHashMap<String, Object>(); innerMap.put("key", null); data3.put("val", innerMap); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)), getIdFactory()); innerMap.put(null, "foo"); failSave(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)), new TypedObjectValidationException( "Object #1 failed type checking:\nKeys in maps/structures may not be null")); Map<String, Object> data4 = new LinkedHashMap<String, Object>(); data4.put("val", null); failSave(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data4, abstype4, null, emptyprov, false)), new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (null) does not match any allowed primitive type (allowed: [\"array\"]), at /val")); data4.put("val", Arrays.asList((String)null, (String)null)); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data4, abstype4, null, emptyprov, false)), getIdFactory()); Map<String, Object> data5 = new LinkedHashMap<String, Object>(); data5.put("val", Arrays.asList(2, (Integer)null, 1)); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data5, abstype5, null, emptyprov, false)), getIdFactory()); Map<String, Object> data6 = new LinkedHashMap<String, Object>(); data6.put("val", Arrays.asList(1.2, (Float)null, 3.6)); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data6, abstype6, null, emptyprov, false)), getIdFactory()); } @Test public void saveEmptyStringKey() throws Exception { WorkspaceUser user = new WorkspaceUser("foo"); WorkspaceIdentifier wspace = new WorkspaceIdentifier("saveEmptyStringKey"); ws.createWorkspace(user, wspace.getName(), false, null, null); Provenance mtprov = new Provenance(user); Map<String, Object> data = new HashMap<String, Object>(); data.put("", 3); //should work ws.saveObjects(user, 
wspace, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, mtprov, false) ), getIdFactory()); @SuppressWarnings("unchecked") Map<String, Object> dataObj = (Map<String, Object>) ws.getObjects(user, Arrays.asList( new ObjectIdentifier(wspace, 1))).get(0).getData(); assertThat("data saved correctly", dataObj, is(data)); } @Test public void saveObjectWithTypeChecking() throws Exception { final String specTypeCheck1 = "module TestTypeChecking {" + "/* @id ws */" + "typedef string reference;" + "typedef string some_id2;" + "/* @optional ref */ " + "typedef structure {" + "int foo;" + "list<int> bar;" + "string baz;" + "reference ref;" + "} CheckType;" + "};"; final String specTypeCheck2 = "module TestTypeChecking {" + "/* @id ws */" + "typedef string reference;" + "/* @optional ref\n" + " @optional map */" + "typedef structure {" + "int foo;" + "list<int> bar;" + "int baz;" + "reference ref;" + "mapping<string, string> map;" + "} CheckType;" + "};"; final String specTypeCheckRefs = "module TestTypeCheckingRefType {" + "/* @id ws TestTypeChecking.CheckType */" + "typedef string reference;" + "/* @optional refmap */" + "typedef structure {" + "int foo;" + "list<int> bar;" + "string baz;" + "reference ref;" + "mapping<reference, string> refmap;" + "} CheckRefType;" + "};"; String mod = "TestTypeChecking"; WorkspaceUser userfoo = new WorkspaceUser("foo"); ws.requestModuleRegistration(userfoo, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(userfoo, specTypeCheck1, Arrays.asList("CheckType"), null, null, false, null); TypeDefId abstype0 = new TypeDefId(new TypeDefName(mod, "CheckType"), 0, 1); TypeDefId abstype1 = new TypeDefId(new TypeDefName(mod, "CheckType"), 1, 0); TypeDefId abstype2 = new TypeDefId(new TypeDefName(mod, "CheckType"), 2, 0); TypeDefId relmintype0 = new TypeDefId(new TypeDefName(mod, "CheckType"), 0); TypeDefId relmintype1 = new TypeDefId(new TypeDefName(mod, "CheckType"), 1); TypeDefId relmintype2 = new TypeDefId(new TypeDefName(mod, "CheckType"), 2); TypeDefId relmaxtype = new TypeDefId(new TypeDefName(mod, "CheckType")); // test basic type checking with different versions WorkspaceIdentifier wspace = new WorkspaceIdentifier("typecheck"); ws.createWorkspace(userfoo, wspace.getName(), false, null, null); Provenance emptyprov = new Provenance(userfoo); Map<String, Object> data1 = new HashMap<String, Object>(); data1.put("foo", 3); data1.put("baz", "astring"); data1.put("bar", Arrays.asList(-3, 1, 234567890)); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)), getIdFactory()); //should work failSave(userfoo, wspace, data1, new TypeDefId("NoModHere.Foo"), emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nModule doesn't exist: NoModHere")); failSave(userfoo, wspace, data1, new TypeDefId("SomeModule.Foo"), emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nUnable to locate type: SomeModule.Foo")); failSave(userfoo, wspace, data1, relmintype0, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information")); failSave(userfoo, wspace, data1, relmintype1, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nUnable to locate type: TestTypeChecking.CheckType-1")); failSave(userfoo, wspace, data1, abstype1, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nUnable 
to locate type: TestTypeChecking.CheckType-1.0")); failSave(userfoo, wspace, data1, relmaxtype, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information")); ws.releaseTypes(userfoo, mod); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, relmaxtype, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, data1, relmintype0, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information")); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, relmintype1, null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, data1, relmintype2, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nUnable to locate type: TestTypeChecking.CheckType-2")); ws.compileNewTypeSpec(userfoo, specTypeCheck2, null, null, null, false, null); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, relmaxtype, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, relmintype1, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, data1, abstype2, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz")); failSave(userfoo, wspace, data1, relmintype2, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information")); Map<String, Object> newdata = new HashMap<String, Object>(data1); newdata.put("baz", 1); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(newdata, abstype2 , null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, newdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz")); failSave(userfoo, wspace, newdata, abstype1, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz")); failSave(userfoo, wspace, newdata, relmaxtype, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz")); failSave(userfoo, wspace, newdata, relmintype1, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: 
[\"string\"]), at /baz")); failSave(userfoo, wspace, newdata, relmintype2, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information")); ws.releaseTypes(userfoo, mod); failSave(userfoo, wspace, data1, relmaxtype, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz")); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, relmintype1, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, data1, abstype2, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz")); failSave(userfoo, wspace, data1, relmintype2, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz")); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(newdata, abstype2 , null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, newdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz")); failSave(userfoo, wspace, newdata, abstype1, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz")); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(newdata, relmaxtype, null, emptyprov, false)), getIdFactory()); failSave(userfoo, wspace, newdata, relmintype1, emptyprov, new TypedObjectValidationException( "Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz")); ws.saveObjects(userfoo, wspace, Arrays.asList( //should work new WorkspaceSaveObject(newdata, relmintype2, null, emptyprov, false)), getIdFactory()); // test non-parseable references and typechecking with object count List<WorkspaceSaveObject> data = new ArrayList<WorkspaceSaveObject>(); data.add(new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)); Map<String, Object> data2 = new HashMap<String, Object>(data1); data2.put("bar", Arrays.asList(-3, 1, "anotherstring")); data.add(new WorkspaceSaveObject(data2, abstype0, null, emptyprov, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /bar/2")); data.set(1, new WorkspaceSaveObject(data2, abstype2, null, emptyprov, false)); @SuppressWarnings("unchecked") List<Integer> intlist = (List<Integer>) data2.get("bar"); intlist.set(2, 42); Map<String, Object> inner = new HashMap<String, Object>(); inner.put("amapkey", 42); data2.put("map", inner); data2.put("baz", 1); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 
failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /map/amapkey")); Map<String, Object> data3 = new HashMap<String, Object>(data1); data3.put("ref", "typecheck/1/1"); data.set(1, new WorkspaceSaveObject(data3, abstype0, null, emptyprov, false)); ws.saveObjects(userfoo, wspace, data, getIdFactory()); //should work Map<String, Object> data4 = new HashMap<String, Object>(data1); data4.put("ref", "foo/bar/baz"); data.set(1, new WorkspaceSaveObject(data4, abstype0, null, emptyprov, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 has unparseable reference foo/bar/baz: Unable to parse version portion of object reference foo/bar/baz to an integer at /ref")); Map<String, Object> data5 = new HashMap<String, Object>(data1); data5.put("ref", null); data.set(1, new WorkspaceSaveObject(data5, abstype0, null, emptyprov, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 failed type checking:\ninstance type (null) not allowed for ID reference (allowed: [\"string\"]), at /ref")); Map<String, Object> data6 = new HashMap<String, Object>(data1); data6.put("ref", ""); data.set(1, new WorkspaceSaveObject(data6, abstype0, null, emptyprov, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 failed type checking:\nUnparseable id of type ws: IDs may not be null or the empty string at /ref")); Provenance goodids = new Provenance(userfoo); goodids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("typecheck/1/1"))); data.set(1, new WorkspaceSaveObject(data3, abstype0, null, goodids, false)); ws.saveObjects(userfoo, wspace, data, getIdFactory()); //should work Provenance badids = new Provenance(userfoo); badids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("foo/bar/baz"))); data.set(1, new WorkspaceSaveObject(data3, abstype0, null, badids, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 has unparseable provenance reference foo/bar/baz: Unable to parse version portion of object reference foo/bar/baz to an integer")); badids = new Provenance(userfoo); badids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList((String) null))); data.set(1, new WorkspaceSaveObject(data3, abstype0, null, badids, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 has a null provenance reference")); badids = new Provenance(userfoo); badids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList(""))); data.set(1, new WorkspaceSaveObject(data3, abstype0, null, badids, false)); failSave(userfoo, wspace, data, new TypedObjectValidationException( "Object #2 has invalid provenance reference: IDs may not be null or the empty string")); //test inaccessible references due to missing, deleted, or unreadable workspaces Map<String, Object> refdata = new HashMap<String, Object>(data1); refdata.put("ref", "thereisnoworkspaceofthisname/2/1"); failSave(userfoo, wspace, refdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 has invalid reference: No read access to id thereisnoworkspaceofthisname/2/1: Object 2 cannot be accessed: No workspace with name thereisnoworkspaceofthisname exists at /ref")); Provenance nowsref = new Provenance(userfoo); nowsref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("thereisnoworkspaceofthisname/2/1"))); 
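// the same inaccessible-workspace checks should apply to provenance references, minus the /ref data path in the message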
failSave(userfoo, wspace, data1, abstype0, nowsref, new TypedObjectValidationException( "Object #1 has invalid provenance reference: No read access to id thereisnoworkspaceofthisname/2/1: Object 2 cannot be accessed: No workspace with name thereisnoworkspaceofthisname exists")); ws.createWorkspace(userfoo, "tobedeleted", false, null, null); ws.setWorkspaceDeleted(userfoo, new WorkspaceIdentifier("tobedeleted"), true); refdata.put("ref", "tobedeleted/2/1"); failSave(userfoo, wspace, refdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 has invalid reference: No read access to id tobedeleted/2/1: Object 2 cannot be accessed: Workspace tobedeleted is deleted at /ref")); Provenance delwsref = new Provenance(userfoo); delwsref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("tobedeleted/2/1"))); failSave(userfoo, wspace, data1, abstype0, delwsref, new TypedObjectValidationException( "Object #1 has invalid provenance reference: No read access to id tobedeleted/2/1: Object 2 cannot be accessed: Workspace tobedeleted is deleted")); ws.createWorkspace(new WorkspaceUser("stingyuser"), "stingyworkspace", false, null, null); refdata.put("ref", "stingyworkspace/2/1"); failSave(userfoo, wspace, refdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 has invalid reference: No read access to id stingyworkspace/2/1: Object 2 cannot be accessed: User foo may not read workspace stingyworkspace at /ref")); Provenance privwsref = new Provenance(userfoo); privwsref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("stingyworkspace/2/1"))); failSave(userfoo, wspace, data1, abstype0, privwsref, new TypedObjectValidationException( "Object #1 has invalid provenance reference: No read access to id stingyworkspace/2/1: Object 2 cannot be accessed: User foo may not read workspace stingyworkspace")); //test inaccessible reference due to missing or deleted objects, incl bad versions ws.createWorkspace(userfoo, "referencetesting", false, null, null); WorkspaceIdentifier reftest = new WorkspaceIdentifier("referencetesting"); ws.saveObjects(userfoo, reftest, Arrays.asList( new WorkspaceSaveObject(newdata, abstype2 , null, emptyprov, false)), getIdFactory()); refdata.put("ref", "referencetesting/1/1"); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(refdata, abstype1 , null, emptyprov, false)), getIdFactory()); Provenance goodref = new Provenance(userfoo); goodref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/1/1"))); ws.saveObjects(userfoo, wspace, Arrays.asList( new WorkspaceSaveObject(refdata, abstype1 , null, goodref, false)), getIdFactory()); refdata.put("ref", "referencetesting/2/1"); long refwsid = ws.getWorkspaceInformation(userfoo, reftest).getId(); failSave(userfoo, wspace, refdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 has invalid reference: There is no object with id referencetesting/2/1: No object with id 2 exists in workspace " + refwsid + " at /ref")); Provenance noobjref = new Provenance(userfoo); noobjref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/2/1"))); failSave(userfoo, wspace, data1, abstype0, noobjref, new TypedObjectValidationException( "Object #1 has invalid provenance reference: There is no object with id referencetesting/2/1: No object with id 2 exists in workspace " + refwsid)); ws.saveObjects(userfoo, reftest, Arrays.asList( new 
WorkspaceSaveObject(newdata, abstype2 , null, emptyprov, false)), getIdFactory()); ws.setObjectsDeleted(userfoo, Arrays.asList(new ObjectIdentifier(reftest, 2)), true); failSave(userfoo, wspace, refdata, abstype0, emptyprov, new TypedObjectValidationException(String.format( "Object #1 has invalid reference: There is no object with id referencetesting/2/1: Object 2 (name auto2) in workspace %s has been deleted at /ref", refwsid))); Provenance delobjref = new Provenance(userfoo); delobjref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/2/1"))); failSave(userfoo, wspace, data1, abstype0, delobjref, new TypedObjectValidationException(String.format( "Object #1 has invalid provenance reference: There is no object with id referencetesting/2/1: Object 2 (name auto2) in workspace %s has been deleted", refwsid))); refdata.put("ref", "referencetesting/1/2"); failSave(userfoo, wspace, refdata, abstype0, emptyprov, new TypedObjectValidationException( "Object #1 has invalid reference: There is no object with id referencetesting/1/2: No object with id 1 (name auto1) and version 2 exists in workspace " + refwsid + " at /ref")); Provenance noverref = new Provenance(userfoo); noverref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/1/2"))); failSave(userfoo, wspace, data1, abstype0, noverref, new TypedObjectValidationException( "Object #1 has invalid provenance reference: There is no object with id referencetesting/1/2: No object with id 1 (name auto1) and version 2 exists in workspace " + refwsid)); //TODO test references against garbage collected objects //test reference type checking String refmod = "TestTypeCheckingRefType"; ws.requestModuleRegistration(userfoo, refmod); ws.resolveModuleRegistration(refmod, true); ws.compileNewTypeSpec(userfoo, specTypeCheckRefs, Arrays.asList("CheckRefType"), null, null, false, null); TypeDefId absreftype0 = new TypeDefId(new TypeDefName(refmod, "CheckRefType"), 0, 1); ws.createWorkspace(userfoo, "referencetypecheck", false, null, null); WorkspaceIdentifier reftypecheck = new WorkspaceIdentifier("referencetypecheck"); long reftypewsid = ws.getWorkspaceInformation(userfoo, reftypecheck).getId(); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(newdata, SAFE_TYPE1 , null, emptyprov, false)), getIdFactory()); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(newdata, abstype2 , null, emptyprov, false)), getIdFactory()); refdata.put("ref", "referencetypecheck/2/1"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work refdata.put("ref", "referencetypecheck/2"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work refdata.put("ref", "referencetypecheck/auto2/1"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work refdata.put("ref", "referencetypecheck/auto2"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work refdata.put("ref", reftypewsid + "/2/1"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work 
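        // numeric workspace-id forms of the reference, with and without an explicit version, should resolve the same way as the name-based forms above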
refdata.put("ref", reftypewsid + "/2"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work refdata.put("ref", reftypewsid + "/auto2/1"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work refdata.put("ref", reftypewsid + "/auto2"); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)), getIdFactory()); //should work String err = "Object #1 has invalid reference: The type " + "SomeModule.AType-0.1 of reference %s in this object is not " + "allowed - allowed types are [TestTypeChecking.CheckType] at /ref"; refdata.put("ref", "referencetypecheck/1/1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, "referencetypecheck/1/1"))); refdata.put("ref", "referencetypecheck/1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, "referencetypecheck/1"))); refdata.put("ref", "referencetypecheck/auto1/1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, "referencetypecheck/auto1/1"))); refdata.put("ref", "referencetypecheck/auto1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, "referencetypecheck/auto1"))); refdata.put("ref", reftypewsid + "/1/1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, reftypewsid + "/1/1"))); refdata.put("ref", reftypewsid + "/1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, reftypewsid + "/1"))); refdata.put("ref", reftypewsid + "/auto1/1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, reftypewsid + "/auto1/1"))); refdata.put("ref", reftypewsid + "/auto1"); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException(String.format(err, reftypewsid + "/auto1"))); //check references were rewritten correctly for (int i = 3; i < 11; i++) { WorkspaceObjectData wod = ws.getObjects(userfoo, Arrays.asList( new ObjectIdentifier(reftypecheck, i))).get(0); WorkspaceObjectData wodsub = ws.getObjectsSubSet(userfoo, Arrays.asList( new SubObjectIdentifier(new ObjectIdentifier(reftypecheck, i), null))).get(0); @SuppressWarnings("unchecked") Map<String, Object> obj = (Map<String, Object>) wod.getData(); @SuppressWarnings("unchecked") Map<String, Object> subobj = (Map<String, Object>) wodsub.getData(); assertThat("reference rewritten correctly", (String) obj.get("ref"), is(reftypewsid + "/2/1")); assertThat("reference included correctly", wod.getReferences(), is(Arrays.asList(reftypewsid + "/2/1"))); assertThat("sub obj reference rewritten correctly", (String) subobj.get("ref"), is(reftypewsid + "/2/1")); assertThat("sub obj reference included correctly", wodsub.getReferences(), is(Arrays.asList(reftypewsid + "/2/1"))); WorkspaceObjectInformation inf = ws.getObjectProvenance(userfoo, Arrays.asList( new ObjectIdentifier(reftypecheck, i))).get(0); assertThat("sub obj reference included correctly", inf.getReferences(), is(Arrays.asList(reftypewsid + "/2/1"))); } } @Test public void 
wsIdErrorOrder() throws Exception { //test that an id error returns the right id if multiple IDs exist WorkspaceUser user = new WorkspaceUser("user1"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("wsIdErrorOrder"); long wsid = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); Map<String, Object> d = new HashMap<String, Object>(); Provenance mtprov = new Provenance(user); objs.add(new WorkspaceSaveObject(d, SAFE_TYPE1, null, mtprov, false)); ws.saveObjects(user, wsi, objs, new IdReferenceHandlerSetFactory(0)); Provenance p = new Provenance(user).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList( wsi.getName() + "/auto1", wsi.getName() + "/auto2"))); objs.set(0, new WorkspaceSaveObject(d, SAFE_TYPE1, null, p, false)); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid provenance reference: There is no object with id wsIdErrorOrder/auto2: No object with name auto2 exists in workspace " + wsid)); } @Test public void duplicateAutoIds() throws Exception { WorkspaceUser user = new WorkspaceUser("user1"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("dupAutoIds"); ws.createWorkspace(user, wsi.getName(), false, null, null); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); Map<String, Object> d1 = new HashMap<String, Object>(); Map<String, Object> d2 = new HashMap<String, Object>(); d2.put("d", 2); Provenance mtprov = new Provenance(user); objs.add(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("auto5-foo"), d1, SAFE_TYPE1, null, mtprov, false)); objs.add(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("auto5-1-1"), d1, SAFE_TYPE1, null, mtprov, false)); objs.add(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("auto5"), d1, SAFE_TYPE1, null, mtprov, false)); objs.add(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("auto5-1"), d1, SAFE_TYPE1, null, mtprov, false)); objs.add(new WorkspaceSaveObject(d2, SAFE_TYPE1, null, mtprov, false)); ws.saveObjects(user, wsi, objs, new IdReferenceHandlerSetFactory(0)); WorkspaceObjectData d = ws.getObjects(user, Arrays.asList( new ObjectIdentifier(wsi, "auto5-2"))).get(0); assertThat("auto named correctly", d.getData(), is((Object) d2)); } @Test public void genericIdExtraction() throws Exception { String idtype1 = "someid"; String idtype2 = "someid2"; // String idtypeint = "someintid"; String mod = "TestIDExtraction"; String type = "IdType"; final String idSpec = "module " + mod + " {\n" + "/* @id " + idtype1 + " */\n" + "typedef string some_id;\n" + "/* @id " + idtype2 + " */\n" + "typedef string some_id2;\n" + // "/* @id " + idtypeint + " */" + // "typedef int int_id;" + "/* @optional an_id\n" + " @optional an_id2\n" + // " @optional an_int_id */" + "*/" + "typedef structure {\n" + "some_id an_id;\n" + "some_id2 an_id2;\n" + // "int_id an_int_id;" + "} " + type + ";\n" + "};\n"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(user, idSpec, Arrays.asList(type), null, null, false, null); TypeDefId idtype = new TypeDefId(new TypeDefName(mod, type), 0, 1); // test basic type checking with different versions WorkspaceIdentifier wsi = new WorkspaceIdentifier("idextract"); ws.createWorkspace(user, wsi.getName(), false, null, null); Provenance emptyprov = new Provenance(user); List<WorkspaceSaveObject> data = new LinkedList<WorkspaceSaveObject>(); data.add(new WorkspaceSaveObject(new HashMap<String, 
Object>(), idtype, null, emptyprov, false)); Map<String, Object> iddata = new HashMap<String, Object>(); IdReferenceHandlerSetFactory fac = getIdFactory().addFactory( new TestIDReferenceHandlerFactory(new IdReferenceType(idtype1))); data.add(new WorkspaceSaveObject(iddata, idtype, null, emptyprov, false)); iddata.put("an_id", "id here"); iddata.put("an_id2", "foo"); // iddata.put("an_int_id", 34); ws.saveObjects(user, wsi, data, fac); //should work Map<String, List<String>> expected = new HashMap<String, List<String>>(); ObjectIdentifier obj1 = new ObjectIdentifier(wsi, "auto1"); checkExternalIds(user, obj1, expected); expected.put(idtype1, Arrays.asList("id here")); ObjectIdentifier obj2 = new ObjectIdentifier(wsi, "auto2"); checkExternalIds(user, obj2, expected); fac.addFactory(new TestIDReferenceHandlerFactory(new IdReferenceType(idtype2))); ws.saveObjects(user, wsi, data, fac); //should work expected.put(idtype2, Arrays.asList("foo")); ObjectIdentifier obj4 = new ObjectIdentifier(wsi, "auto4"); checkExternalIds(user, obj4, expected); ObjectIdentifier copied = new ObjectIdentifier(wsi, "copied"); ws.copyObject(user, obj4, copied); checkExternalIds(user, copied, expected); WorkspaceIdentifier clone = new WorkspaceIdentifier("idextract_cloned"); ws.cloneWorkspace(user, wsi, clone.getName(), false, null, null); ObjectIdentifier clonedobj = new ObjectIdentifier(clone, "copied"); checkExternalIds(user, clonedobj, expected); ws.saveObjects(user, wsi, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("copied"), new HashMap<String, Object>(), idtype, null, emptyprov, false)), fac); ws.revertObject(user, new ObjectIdentifier(wsi, "copied", 1)); checkExternalIds(user, new ObjectIdentifier(wsi, "copied", 3), expected); expected.clear(); ws.revertObject(user, new ObjectIdentifier(wsi, "copied", 2)); checkExternalIds(user, new ObjectIdentifier(wsi, "copied", 4), expected); // //check int ids // fac.addFactory(new TestIDReferenceHandlerFactory(new IdReferenceType(idtypeint))); // // ws.saveObjects(user, wsi, data, fac); //should work // expected.put(idtype1, Arrays.asList("id here")); // expected.put(idtype2, Arrays.asList("foo")); // expected.put(idtypeint, Arrays.asList("34")); // checkExternalIds(user, new ObjectIdentifier(wsi, "auto7"), expected); // // iddata.put("an_int_id", null); // // failSave(user, wsi, data, fac, new TypedObjectValidationException( // "Object #2 failed type checking:\ninstance type (null) not allowed for ID reference (allowed: [\"integer\"]), at /an_int_id")); iddata.put("an_id", "parseExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 failed type checking:\nUnparseable id parseExcept of type someid: Parse exception for ID parseExcept at /an_id")); iddata.clear(); iddata.put("an_id2", "refExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 failed type checking:\nInvalid id refExcept of type someid2: Reference exception for ID refExcept at /an_id2")); iddata.clear(); iddata.put("an_id", "genExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 failed type checking:\nId handling error for id type someid: General exception for ID genExcept at /an_id")); iddata.put("an_id", "procParseExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 has unparseable reference procParseExcept: Process Parse exception for ID procParseExcept at /an_id")); iddata.clear(); iddata.put("an_id2", "procRefExcept"); failSave(user, wsi, data, fac, new 
TypedObjectValidationException( "Object #2 has invalid reference: Process Reference exception for ID procRefExcept at /an_id2")); iddata.clear(); iddata.put("an_id", "procGenExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "An error occured while processing IDs: Process General exception for ID procGenExcept")); } @Test public void wsIDHandling() throws Exception { String mod = "WsIDHandling"; String type = "IdType"; final String idSpec = "module " + mod + " {\n" + "/* @optional foo */\n" + "typedef structure {\n" + "int foo;\n" + "} Type1;\n" + "/* @optional foo */\n" + "typedef structure {\n" + "int foo;\n" + "} Type2;\n" + "/* @optional foo */\n" + "typedef structure {\n" + "int foo;\n" + "} Type3;\n" + "/* @id ws */\n" + "typedef string ws_any;\n" + "/* @id ws " + mod + ".Type1 */\n" + "typedef string ws_1;\n" + "/* @id ws " + mod + ".Type2 */\n" + "typedef string ws_2;\n" + "/* @id ws " + mod + ".Type3 */\n" + "typedef string ws_3;\n" + "/* @id ws " + mod + ".Type1 " + mod + ".Type2 */\n" + "typedef string ws_12;\n" + "/* @id ws " + mod + ".Type1 " + mod + ".Type3 */\n" + "typedef string ws_13;\n" + "/* @id ws " + mod + ".Type2 " + mod + ".Type3 */\n" + "typedef string ws_23;\n" + "/* @optional ws_any ws_1 ws_2 ws_3 ws_12 ws_13 ws_23 */\n" + "typedef structure {\n" + "list<ws_any> ws_any;\n" + "list<mapping<ws_1, int>> ws_1;\n" + "list<tuple<string, ws_2>> ws_2;\n" + "list<list<ws_3>> ws_3;\n" + "list<ws_12> ws_12;\n" + "list<ws_13> ws_13;\n" + "list<ws_23> ws_23;\n" + "} " + type + ";\n" + "};\n"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(user, idSpec, Arrays.asList(type, "Type1", "Type2", "Type3"), null, null, false, null); TypeDefId type1 = new TypeDefId(new TypeDefName(mod, "Type1"), 0, 1); TypeDefId type2 = new TypeDefId(new TypeDefName(mod, "Type2"), 0, 1); TypeDefId type3 = new TypeDefId(new TypeDefName(mod, "Type3"), 0, 1); TypeDefId idtype = new TypeDefId(new TypeDefName(mod, type), 0, 1); // test basic type checking with different versions WorkspaceIdentifier wsi = new WorkspaceIdentifier("wsIDHandling"); long wsid = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); Provenance emptyprov = new Provenance(user); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); IdReferenceHandlerSetFactory fac = new IdReferenceHandlerSetFactory(3); Map<String, Object> mt = new HashMap<String, Object>(); objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("t1"), mt, type1, null, emptyprov, false)); objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("t2"), mt, type2, null, emptyprov, false)); objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("t3"), mt, type3, null, emptyprov, false)); ws.saveObjects(user, wsi, objs, fac); String ref1 = wsi.getName() + "/t1"; String ref2 = wsi.getName() + "/t2"; String ref3 = wsi.getName() + "/t3"; List<String> all3 = Arrays.asList(ref1, ref2, ref3); Map<String, Object> data = new HashMap<String, Object>(); data.put("ws_any", all3); Map<String, Integer> innermap = new HashMap<String, Integer>(); data.put("ws_1", Arrays.asList(innermap)); innermap.put(ref1, 3); ArrayList<List<String>> innertuple = new ArrayList<List<String>>(); data.put("ws_2", innertuple); innertuple.add(Arrays.asList("foo", ref2)); ArrayList<String> innerlist = new ArrayList<String>(); data.put("ws_3", Arrays.asList(innerlist)); innerlist.add(ref3); data.put("ws_12", Arrays.asList(ref1, ref2)); 
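        // ws_13 and ws_23 below cover the remaining two-type union ID typedefs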
data.put("ws_13", Arrays.asList(ref1, ref3)); data.put("ws_23", Arrays.asList(ref2, ref3)); objs.clear(); objs.add(new WorkspaceSaveObject(data, idtype, null, emptyprov, false)); //should work ws.saveObjects(user, wsi, objs, fac); innermap.put(ref2, 4); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type2-0.1 of reference wsIDHandling/t2 in this object is not allowed - allowed types are [WsIDHandling.Type1] at /ws_1/0/wsIDHandling/t2")); innermap.remove(ref2); innermap.put(ref3, 6); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type3-0.1 of reference wsIDHandling/t3 in this object is not allowed - allowed types are [WsIDHandling.Type1] at /ws_1/0/wsIDHandling/t3")); innermap.remove(ref3); innertuple.add(Arrays.asList("bar", ref1)); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type1-0.1 of reference wsIDHandling/t1 in this object is not allowed - allowed types are [WsIDHandling.Type2] at /ws_2/1/1")); innertuple.clear(); innertuple.add(Arrays.asList("baz", ref3)); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type3-0.1 of reference wsIDHandling/t3 in this object is not allowed - allowed types are [WsIDHandling.Type2] at /ws_2/0/1")); innertuple.set(0, Arrays.asList("foo", ref2)); innerlist.add(ref1); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type1-0.1 of reference wsIDHandling/t1 in this object is not allowed - allowed types are [WsIDHandling.Type3] at /ws_3/0/1")); innerlist.set(1, ref3); innerlist.add(ref2); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type2-0.1 of reference wsIDHandling/t2 in this object is not allowed - allowed types are [WsIDHandling.Type3] at /ws_3/0/2")); innerlist.remove(2); innerlist.remove(1); data.put("ws_12", all3); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type3-0.1 of reference wsIDHandling/t3 in this object is not allowed - allowed types are [WsIDHandling.Type1, WsIDHandling.Type2] at /ws_12/2")); data.put("ws_12", Arrays.asList(ref1, ref2)); data.put("ws_13", all3); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type2-0.1 of reference wsIDHandling/t2 in this object is not allowed - allowed types are [WsIDHandling.Type1, WsIDHandling.Type3] at /ws_13/1")); data.put("ws_13", Arrays.asList(ref1, ref3)); data.put("ws_23", all3); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type1-0.1 of reference wsIDHandling/t1 in this object is not allowed - allowed types are [WsIDHandling.Type2, WsIDHandling.Type3] at /ws_23/0")); //test id path returns on parse and inaccessible object exceptions data.put("ws_23", Arrays.asList(ref2, ref3)); innertuple.set(0, Arrays.asList("foo", "YourMotherWasAHamster")); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has unparseable reference YourMotherWasAHamster: Illegal number of separators / in object reference YourMotherWasAHamster at /ws_2/0/1")); innertuple.set(0, Arrays.asList("foo", ref2)); data.remove("ws_any"); ws.setObjectsDeleted(user, Arrays.asList(new 
ObjectIdentifier(wsi, "t1")), true); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: There is no object with id wsIDHandling/t1: Object 1 (name t1) in workspace " + wsid + " has been deleted at /ws_12/0")); } @Test public void maxIdsPerCall() throws Exception { String idtype1 = "someid"; String idtype2 = "someid2"; String mod = "TestMaxId"; String listtype = "ListIdType"; final String idSpec = "module " + mod + " {\n" + "/* @id ws */\n" + "typedef string ws_id;\n" + "/* @id " + idtype1 + " */\n" + "typedef string some_id;\n" + "/* @id " + idtype2 + " */\n" + "typedef string some_id2;\n" + "/* @id " + idtype1 + " attrib1 */\n" + "typedef string some_id_a1;\n" + "/* @id " + idtype1 + " attrib2 */\n" + "typedef string some_id_a2;\n" + "/* @optional ws_ids\n" + " @optional some_ids\n" + " @optional some_ids2\n" + " @optional some_ids_a1\n" + " @optional some_ids_a2\n" + "*/\n" + "typedef structure {\n" + "list<ws_id> ws_ids;\n" + "list<some_id> some_ids;\n" + "list<some_id2> some_ids2;\n" + "list<some_id_a1> some_ids_a1;\n" + "list<some_id_a2> some_ids_a2;\n" + "} " + listtype + ";\n" + "};\n"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(user, idSpec, Arrays.asList(listtype), null, null, false, null); TypeDefId listidtype = new TypeDefId(new TypeDefName(mod, listtype), 0, 1); // test basic type checking with different versions WorkspaceIdentifier wsi = new WorkspaceIdentifier("maxids"); ws.createWorkspace(user, wsi.getName(), false, null, null); Provenance emptyprov = new Provenance(user); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); WorkspaceSaveObject mtobj = new WorkspaceSaveObject( new HashMap<String, String>(), listidtype, null, emptyprov, false); objs.add(mtobj); objs.add(mtobj); IdReferenceHandlerSetFactory fac = makeFacForMaxIDTests( Arrays.asList(idtype1, idtype2), user, 8); ws.saveObjects(user, wsi, objs, fac); objs.clear(); Map<String, Object> data1 = new HashMap<String, Object>(); data1.put("ws_ids", Arrays.asList("maxids/auto1", "maxids/auto2", "maxids/auto1")); data1.put("some_ids", Arrays.asList("foo", "bar", "foo")); data1.put("some_ids2", Arrays.asList("foo", "baz", "foo")); data1.put("some_ids_a1", Arrays.asList("foo", "bak", "foo")); data1.put("some_ids_a2", Arrays.asList("foo", "baf", "foo")); objs.add(new WorkspaceSaveObject(data1, listidtype, null, emptyprov, false)); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 7); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #1 - the number of unique IDs in the saved objects exceeds the maximum allowed, 7")); Provenance p = new Provenance(user).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList( "maxids/auto1", "maxids/auto2", "maxids/auto1"))); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 10); objs.set(0, new WorkspaceSaveObject(data1, listidtype, null, p, false)); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 9); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #1 - the number of unique IDs in the saved objects exceeds the maximum allowed, 9")); objs.set(0, new WorkspaceSaveObject(data1, listidtype, null, emptyprov, false)); objs.add(new WorkspaceSaveObject(data1, 
listidtype, null, emptyprov, false)); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 16); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 15); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #2 - the number of unique IDs in the saved objects exceeds the maximum allowed, 15")); objs.set(0, new WorkspaceSaveObject(data1, listidtype, null, p, false)); objs.set(1, new WorkspaceSaveObject(data1, listidtype, null, p, false)); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 20); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 19); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #2 - the number of unique IDs in the saved objects exceeds the maximum allowed, 19")); } private IdReferenceHandlerSetFactory makeFacForMaxIDTests(List<String> idtypes, WorkspaceUser user, int max) { IdReferenceHandlerSetFactory fac = new IdReferenceHandlerSetFactory(max); // .addFactory(ws.getHandlerFactory(user)); for (String idtype: idtypes) { fac.addFactory(new TestIDReferenceHandlerFactory( new IdReferenceType(idtype))); } return fac; } @Test public void referenceClash() throws Exception { String mod = "TestTypeCheckingErr"; final String specTypeCheck1 = "module " + mod + " {" + "typedef structure {" + "int foo;" + "list<int> bar;" + "string baz;" + "} CheckType;" + "};"; WorkspaceUser userfoo = new WorkspaceUser("foo"); Provenance emptyprov = new Provenance(userfoo); ws.requestModuleRegistration(userfoo, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(userfoo, specTypeCheck1, Arrays.asList("CheckType"), null, null, false, null); ws.releaseTypes(userfoo, mod); TypeDefId abstype0 = new TypeDefId(new TypeDefName(mod, "CheckType"), 1, 0); String wsName = "reftypecheckerror"; ws.createWorkspace(userfoo, wsName, false, null, null); WorkspaceIdentifier reftypecheck = new WorkspaceIdentifier(wsName); Map<String, Object> refdata = new HashMap<String, Object>(); refdata.put("foo", 3); refdata.put("baz", "astring"); refdata.put("bar", Arrays.asList(-3, 1, 234567890)); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, abstype0 , null, emptyprov, false)), getIdFactory()); String refmod = "TestTypeCheckingRefTypeErr"; final String specTypeCheckRefs = "module " + refmod + " {" + "/* @id ws " + mod + ".CheckType */" + "typedef string reference;" + "/* @optional refmap */" + "typedef structure {" + "int foo;" + "list<int> bar;" + "string baz;" + "reference ref;" + "mapping<reference, string> refmap;" + "} CheckRefType;" + "};"; ws.requestModuleRegistration(userfoo, refmod); ws.resolveModuleRegistration(refmod, true); ws.compileNewTypeSpec(userfoo, specTypeCheckRefs, Arrays.asList("CheckRefType"), null, null, false, null); ws.releaseTypes(userfoo, refmod); TypeDefId absreftype0 = new TypeDefId(new TypeDefName(refmod, "CheckRefType"), 1, 0); long reftypewsid = ws.getWorkspaceInformation(userfoo, reftypecheck).getId(); //test the edge case where two keys in a hash resolve to the same reference refdata.put("ref", wsName + "/1/1"); Map<String, String> refmap = new HashMap<String, String>(); refmap.put(wsName + "/1/1", "pootypoot"); refmap.put(wsName + "/auto1/1", "pootypoot"); assertThat("refmap has 2 refs", refmap.size(), is(2)); refdata.put("refmap", refmap); failSave(userfoo, reftypecheck, refdata, 
absreftype0, emptyprov, new TypedObjectValidationException(
                "Object #1: Two references in a single hash are identical when resolved, resulting in a loss of data: " +
                "Duplicated key '" + reftypewsid + "/1/1' was found at /refmap"));
    }

    @Test
    public void saveProvenance() throws Exception {
        WorkspaceUser foo = new WorkspaceUser("foo");
        WorkspaceIdentifier prov = new WorkspaceIdentifier("provenance");
        ws.createWorkspace(foo, prov.getName(), false, null, null);
        long wsid = ws.getWorkspaceInformation(foo, prov).getId();
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("foo", "bar");
        Provenance emptyprov = new Provenance(foo);
        //already tested bad references in saveObjectWithTypeChecking, won't test again here
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory());
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory());
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory());
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto1"), data, SAFE_TYPE1, null, emptyprov, false)),
                getIdFactory());
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto1"), data, SAFE_TYPE1, null, emptyprov, false)),
                getIdFactory());
        List<ExternalData> ed = new LinkedList<ExternalData>();
        ed.add(new ExternalData()
                .withDataId("data id")
                .withDataUrl("http://somedata.org/somedata")
                .withDescription("a description")
                .withResourceName("resource")
                .withResourceReleaseDate(new Date(62))
                .withResourceUrl("http://somedata.org")
                .withResourceVersion("1.2.3")
                );
        ed.add(new ExternalData().withDataId("data id2"));
        Provenance p = new Provenance(foo);
        p.addAction(new ProvenanceAction()
                .withCommandLine("A command line")
                .withDescription("descrip")
                .withIncomingArgs(Arrays.asList("a", "b", "c"))
                .withMethod("method")
                .withMethodParameters(Arrays.asList((Object) data, data, data))
                .withOutgoingArgs(Arrays.asList("d", "e", "f"))
                .withScript("script")
                .withScriptVersion("2.1")
                .withServiceName("service")
                .withServiceVersion("3")
                .withTime(new Date(45))
                .withExternalData(ed)
                .withWorkspaceObjects(Arrays.asList("provenance/auto3", "provenance/auto1/2")));
        p.addAction(new ProvenanceAction()
                .withWorkspaceObjects(Arrays.asList("provenance/auto2/1", "provenance/auto1")));
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, p, false)), getIdFactory());
        Map<String, String> refmap = new HashMap<String, String>();
        refmap.put("provenance/auto3", wsid + "/3/1");
        refmap.put("provenance/auto1/2", wsid + "/1/2");
        refmap.put("provenance/auto2/1", wsid + "/2/1");
        refmap.put("provenance/auto1", wsid + "/1/3");
        checkProvenanceCorrect(foo, p, new ObjectIdentifier(prov, 4), refmap);
        try {
            new WorkspaceSaveObject(data, SAFE_TYPE1, null, null, false);
            fail("saved without provenance");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("Neither data, provenance, nor type may be null"));
        }
        try {
            new WorkspaceSaveObject(new ObjectIDNoWSNoVer("foo"), SAFE_TYPE1, null, null, false);
            fail("saved without provenance");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("Neither data, provenance, nor type may be null"));
        }
        try {
            new Provenance(null);
            fail("created provenance with null user");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("user cannot be null"));
        }
        try {
            Provenance pv = new Provenance(foo);
            pv.addAction(null);
            fail("added null action to provenance");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("action cannot be null"));
        }
        //Test minimal provenance
        Provenance p2 = new Provenance(foo);
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, p2, false)), getIdFactory());
        List<Date> dates = checkProvenanceCorrect(foo, p2, new ObjectIdentifier(prov, 5),
                new HashMap<String, String>());
        Provenance got2 = ws.getObjects(foo, Arrays.asList(new ObjectIdentifier(prov, 5))).get(0).getProvenance();
        assertThat("Prov date constant", got2.getDate(), is(dates.get(0)));
        Provenance gotsub2 = ws.getObjectsSubSet(foo, Arrays.asList(new SubObjectIdentifier(
                new ObjectIdentifier(prov, 5), null))).get(0).getProvenance();
        assertThat("Prov date constant", gotsub2.getDate(), is(dates.get(1)));
        assertThat("Prov dates same", got2.getDate(), is(gotsub2.getDate()));
        Provenance gotProv2 = ws.getObjectProvenance(foo, Arrays.asList(
                new ObjectIdentifier(prov, 5))).get(0).getProvenance();
        assertThat("Prov date constant", gotProv2.getDate(), is(dates.get(2)));
        assertThat("Prov dates same", got2.getDate(), is(gotProv2.getDate()));
        //make sure passing nulls for ws obj lists doesn't kill anything
        Provenance p3 = new Provenance(foo);
        p3.addAction(new ProvenanceAction().withWorkspaceObjects(null));
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, p3, false)), getIdFactory());
        checkProvenanceCorrect(foo, p3, new ObjectIdentifier(prov, 6), new HashMap<String, String>());
        Provenance p4 = new Provenance(foo);
        ProvenanceAction pa = new ProvenanceAction();
        pa.setWorkspaceObjects(null);
        p4.addAction(pa);
        p3.addAction(new ProvenanceAction().withWorkspaceObjects(null));
        ws.saveObjects(foo, prov, Arrays.asList(
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, p4, false)), getIdFactory());
        checkProvenanceCorrect(foo, p4, new ObjectIdentifier(prov, 7), new HashMap<String, String>());
    }

    @Test
    public void saveLargeProvenance() throws Exception {
        WorkspaceUser foo = new WorkspaceUser("foo");
        WorkspaceIdentifier prov = new WorkspaceIdentifier("bigprov");
        ws.createWorkspace(foo, prov.getName(), false, null, null);
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("foo", "bar");
        List<Object> methparams = new ArrayList<Object>();
        for (int i = 1; i < 997; i++) {
            methparams.add(TEXT1000);
        }
        Provenance p = new Provenance(foo);
        p.addAction(new ProvenanceAction().withMethodParameters(methparams));
        ws.saveObjects(foo, prov, Arrays.asList( //should work
                new WorkspaceSaveObject(data, SAFE_TYPE1, null, p, false)), getIdFactory());
        methparams.add(TEXT1000);
        Provenance p2 = new Provenance(foo);
        p2.addAction(new ProvenanceAction().withMethodParameters(methparams));
        try {
            ws.saveObjects(foo, prov, Arrays.asList(
                    new WorkspaceSaveObject(data, SAFE_TYPE1, null, p2, false)), getIdFactory());
            fail("saved too big prov");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("Object #1 provenance size 1000290 exceeds limit of 1000000"));
        }
    }

    //TODO BF this test belongs in the user metadata test
    /*
    @Test
    public void bigUserMetaErrors() throws Exception {
        WorkspaceUser foo = new WorkspaceUser("foo");
        WorkspaceIdentifier read = new WorkspaceIdentifier("bigmeta");
        ws.createWorkspace(foo, read.getIdentifierString(), false, null, null);
        Map<String, Object> data = new HashMap<String, Object>();
        Map<String, String> smallmeta = new HashMap<String, String>();
        smallmeta.put("foo", "bar");
        Map<String, String> meta = new HashMap<String, String>();
        data.put("fubar", "bar");
        JsonNode savedata = MAPPER.valueToTree(data);
        for (int i = 0; i < 18; i++) {
            meta.put(Integer.toString(i), LONG_TEXT); //> 16kb now
        }
        try {
            ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject(
                    new ObjectIDNoWSNoVer("bigmeta"), savedata, SAFE_TYPE1, meta,
                    new Provenance(foo), false)), getIdFactory());
            fail("saved object with > 16kb metadata");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("Metadata size of 19413 is > 16000 bytes"));
        }
        try {
            ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject(
                    new ObjectIDNoWSNoVer(3), savedata, SAFE_TYPE1, meta,
                    new Provenance(foo), false)), getIdFactory());
            fail("saved object with > 16kb metadata");
        } catch (IllegalArgumentException iae) {
            assertThat("correct exception", iae.getLocalizedMessage(),
                    is("Metadata size of 19413 is > 16000 bytes"));
        }
    }*/

    @Test
    public void saveWithWrongObjectId() throws Exception {
        WorkspaceUser foo = new WorkspaceUser("foo");
        WorkspaceIdentifier read = new WorkspaceIdentifier("wrongobjid");
        ws.createWorkspace(foo, read.getIdentifierString(), false, null, null);
        Map<String, Object> data = new HashMap<String, Object>();
        JsonNode savedata = MAPPER.valueToTree(data);
        try {
            ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject(
                    new ObjectIDNoWSNoVer(3), savedata, SAFE_TYPE1, null,
                    new Provenance(foo), false)), getIdFactory());
            fail("saved object with non-existent id");
        } catch (NoSuchObjectException nsoe) {
            assertThat("correct exception", nsoe.getLocalizedMessage(),
                    is("There is no object with id 3"));
        }
    }

    @Test
    public void unserializableData() throws Exception {
        WorkspaceUser foo = new WorkspaceUser("foo");
        WorkspaceIdentifier read = new WorkspaceIdentifier("unserializable");
        ws.createWorkspace(foo, read.getIdentifierString(), false, null, null);
        Object data = new StringReader("foo");
        Map<String, String> meta = new HashMap<String, String>();
        meta.put("foo", "bar");
        try {
            ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject(
                    new ObjectIDNoWSNoVer("jframe"), data, SAFE_TYPE1,
                    new WorkspaceUserMetadata(meta), new Provenance(foo), false)),
                    getIdFactory());
            fail("saved unserializable object");
        } catch (IllegalArgumentException iae) {
            assertThat("Actual exception: " + iae.getMessage(), iae.getMessage(),
                    is("UObject can not serialize object of this type: java.io.StringReader"));
        }
    }

    @Test
    public void getNonexistantObjects() throws Exception {
        WorkspaceUser foo = new WorkspaceUser("foo");
        WorkspaceIdentifier read = new WorkspaceIdentifier("nonexistantobjects");
        ws.createWorkspace(foo, read.getIdentifierString(), false, null, null);
        long readid = ws.getWorkspaceInformation(foo, read).getId();
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("fubar", "thingy");
        JsonNode savedata = MAPPER.valueToTree(data);
        List<WorkspaceSaveObject> objects = new ArrayList<WorkspaceSaveObject>();
        objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("myname"),
                savedata, SAFE_TYPE1, null, new Provenance(foo), false));
        ws.saveObjects(foo, read, objects, getIdFactory());
        getNonExistantObject(foo, new ObjectIdentifier(read, 2),
                "No object with id 2 exists in workspace " + readid);
        getNonExistantObject(foo, new ObjectIdentifier(read, 1, 2),
                "No object with id 1 (name myname) and version 2 exists in workspace " + readid);
        getNonExistantObject(foo, new ObjectIdentifier(read, 
"myname2"), "No object with name myname2 exists in workspace " + readid); getNonExistantObject(foo, new ObjectIdentifier(read, "myname", 2), "No object with id 1 (name myname) and version 2 exists in workspace " + readid); } @Test public void objectIDs() throws Exception { WorkspaceIdentifier goodWs = new WorkspaceIdentifier("foo"); testObjectIdentifier("f|o.A-1_2"); testObjectIdentifier("f|o.A-1_2", 1); testObjectIdentifier(null, "foo", "wsi cannot be null"); testObjectIdentifier(goodWs, null, "Object name cannot be null or the empty string"); testObjectIdentifier(goodWs, "", "Object name cannot be null or the empty string"); testObjectIdentifier(goodWs, "f|o.A-1_2+", "Illegal character in object name f|o.A-1_2+: +"); testObjectIdentifier(goodWs, "-1", "Object names cannot be integers: -1"); testObjectIdentifier(goodWs, "15", "Object names cannot be integers: 15"); testObjectIdentifier(goodWs, "f|o.A-1_2", 0, "Object version must be > 0"); testObjectIdentifier(goodWs, TEXT256, "Object name exceeds the maximum length of 255"); testObjectIdentifier(1); testObjectIdentifier(1, 1); testObjectIdentifier(null, 1, "wsi cannot be null"); testObjectIdentifier(goodWs, 0, "Object id must be > 0"); testObjectIdentifier(goodWs, 0, 1, "Object id must be > 0"); testObjectIdentifier(goodWs, 1, 0, "Object version must be > 0"); testCreate(goodWs, "f|o.A-1_2", null); testCreate(goodWs, null, 1L); testCreate(null, "boo", null, "wsi cannot be null"); testCreate(goodWs, TEXT256, null, "Object name exceeds the maximum length of 255"); testCreate(goodWs, null, null, "Must provide one and only one of object name (was: null) or id (was: null)"); testCreate(goodWs, "boo", 1L, "Must provide one and only one of object name (was: boo) or id (was: 1)"); testCreate(goodWs, "-1", null, "Object names cannot be integers: -1"); testCreate(goodWs, "15", null, "Object names cannot be integers: 15"); testCreateVer(goodWs, "boo", null, 1); testCreateVer(goodWs, null, 1L, 1); testCreateVer(goodWs, "boo", null, null); testCreateVer(goodWs, null, 1L, null); testCreateVer(goodWs, "boo", null, 0, "Object version must be > 0"); testCreateVer(goodWs, TEXT256, null, 1, "Object name exceeds the maximum length of 255"); testCreateVer(goodWs, null, 1L, 0, "Object version must be > 0"); testRef("foo/bar"); testRef("foo/bar/1"); testRef("foo/bar/1/2", "Illegal number of separators / in object reference foo/bar/1/2"); testRef("foo/" + TEXT256 + "/1", "Object name exceeds the maximum length of 255"); testRef("foo/bar/n", "Unable to parse version portion of object reference foo/bar/n to an integer"); testRef("foo", "Illegal number of separators / in object reference foo"); testRef("1/2"); testRef("1/2/3"); testRef("1/2/3/4", "Illegal number of separators / in object reference 1/2/3/4"); testRef("1/2/n", "Unable to parse version portion of object reference 1/2/n to an integer"); testRef("1", "Illegal number of separators / in object reference 1"); testRef("foo/2"); testRef("2/foo"); testRef("foo/2/1"); testRef("2/foo/1"); } @Test public void deleteUndelete() throws Exception { WorkspaceUser user = new WorkspaceUser("deleteundelete"); WorkspaceIdentifier read = new WorkspaceIdentifier("deleteundelete"); WorkspaceInformation readinfo = ws.createWorkspace(user, read.getIdentifierString(), false, "descrip", null); long wsid = readinfo.getId(); Date lastReadDate = readinfo.getModDate(); Map<String, String> data1 = new HashMap<String, String>(); Map<String, String> data2 = new HashMap<String, String>(); data1.put("data", "1"); data2.put("data", 
"2"); WorkspaceSaveObject sobj1 = new WorkspaceSaveObject( new ObjectIDNoWSNoVer("obj"), data1, SAFE_TYPE1, null, new Provenance(user), false); ws.saveObjects(user, read, Arrays.asList(sobj1, new WorkspaceSaveObject(new ObjectIDNoWSNoVer("obj"), data2, SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); ObjectIdentifier o1 = new ObjectIdentifier(read, "obj", 1); ObjectIdentifier o2 = new ObjectIdentifier(read, "obj", 2); Map<ObjectIdentifier, Object> idToData = new HashMap<ObjectIdentifier, Object>(); idToData.put(o1, data1); idToData.put(o2, data2); List<ObjectIdentifier> objs = new ArrayList<ObjectIdentifier>(idToData.keySet()); checkNonDeletedObjs(user, idToData); List<ObjectIdentifier> obj1 = new ArrayList<ObjectIdentifier>(Arrays.asList(o1)); List<ObjectIdentifier> obj2 = new ArrayList<ObjectIdentifier>(Arrays.asList(o2)); try { ws.setObjectsDeleted(new WorkspaceUser("bar"), obj1, true); fail("deleted objects w/o auth"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: User bar may not delete objects from workspace deleteundelete")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(o1)); } try { ws.setObjectsDeleted(new WorkspaceUser("bar"), obj1, false); fail("undeleted objects w/o auth"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: User bar may not undelete objects from workspace deleteundelete")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(o1)); } lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj1, true); lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on delete"); String err = String.format("Object 1 (name obj) in workspace %s has been deleted", wsid); failToGetDeletedObjects(user, objs, err); failToGetDeletedObjects(user, obj1, err); failToGetDeletedObjects(user, obj2, err); try { ws.setObjectsDeleted(user, obj2, true); //should have no effect } catch (NoSuchObjectException nsoe) { assertThat("correct exception", nsoe.getLocalizedMessage(), is("Object 1 (name obj) in workspace " + wsid + " has been deleted")); } failToGetDeletedObjects(user, objs, err); failToGetDeletedObjects(user, obj1, err); failToGetDeletedObjects(user, obj2, err); lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj2, false); lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on undelete"); checkNonDeletedObjs(user, idToData); lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj1, false);//should have no effect lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on undelete"); checkNonDeletedObjs(user, idToData); lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj2, true); lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on delete"); failToGetDeletedObjects(user, objs, err); failToGetDeletedObjects(user, obj1, err); failToGetDeletedObjects(user, obj2, err); //save should undelete ws.saveObjects(user, read, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("obj"), data1, SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); ObjectIdentifier o3 = new ObjectIdentifier(read, "obj", 3); idToData.put(o3, data1); objs = new 
ArrayList<ObjectIdentifier>(idToData.keySet()); checkNonDeletedObjs(user, idToData); assertThat("can get ws description", ws.getWorkspaceDescription(user, read), is("descrip")); checkWSInfo(ws.getWorkspaceInformation(user, read), user, "deleteundelete", 1, Permission.OWNER, false, "unlocked", MT_META); WorkspaceUser bar = new WorkspaceUser("bar"); ws.setPermissions(user, read, Arrays.asList(bar), Permission.ADMIN); Map<User, Permission> p = new HashMap<User, Permission>(); p.put(user, Permission.OWNER); p.put(bar, Permission.ADMIN); assertThat("can get perms", ws.getPermissions( user, Arrays.asList(read)).get(0), is(p)); try { ws.setWorkspaceDeleted(bar, read, true); fail("Non owner deleted workspace"); } catch (WorkspaceAuthorizationException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("User bar may not delete workspace deleteundelete")); } WorkspaceInformation read1 = ws.getWorkspaceInformation(user, read); ws.setWorkspaceDeleted(user, read, true); WorkspaceInformation read2 = ws.listWorkspaces(user, null, null, null, null, null, true, true, false).get(0); try { ws.getWorkspaceDescription(user, read); fail("got description from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.getWorkspaceInformation(user, read); fail("got meta from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.setPermissions(user, read, Arrays.asList(bar), Permission.NONE); fail("set perms on deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.getPermissions(user, Arrays.asList(read)); fail("got perms from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } failGetObjects(bar, objs, new InaccessibleObjectException( "Object obj cannot be accessed: Workspace deleteundelete is deleted")); try { ws.getObjectInformation(bar, objs, false, false); fail("got obj meta from deleted workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception msg", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: Workspace deleteundelete is deleted")); } try { ws.saveObjects(bar, read, Arrays.asList(sobj1), getIdFactory()); fail("saved objs from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.setObjectsDeleted(bar, obj1, true); } catch (InaccessibleObjectException ioe) { assertThat("correct exception msg", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: Workspace deleteundelete is deleted")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(o1)); } ws.setWorkspaceDeleted(user, read, false); WorkspaceInformation read3 = ws.getWorkspaceInformation(user, read); checkNonDeletedObjs(user, idToData); assertThat("can get ws description", ws.getWorkspaceDescription(user, read), is("descrip")); checkWSInfo(ws.getWorkspaceInformation(user, read), user, "deleteundelete", 1, Permission.OWNER, false, "unlocked", MT_META); ws.setPermissions(user, read, Arrays.asList(bar), Permission.ADMIN); assertThat("can get perms", ws.getPermissions( user, 
Arrays.asList(read)).get(0), is(p));
        assertTrue("date changed on delete", read1.getModDate().before(read2.getModDate()));
        assertTrue("date changed on undelete", read2.getModDate().before(read3.getModDate()));
    }

    @Test
    public void testTypeMd5s() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        String typeDefName = "SomeModule.AType";
        Map<String,String> type2md5 = ws.translateToMd5Types(Arrays.asList(typeDefName + "-1.0"), null);
        Assert.assertEquals(1, type2md5.size());
        String md5TypeDef = type2md5.get(typeDefName + "-1.0");
        Assert.assertNotNull(md5TypeDef);
        Map<String, List<String>> md52semantic = ws.translateFromMd5Types(Arrays.asList(md5TypeDef));
        Assert.assertEquals(1, md52semantic.size());
        List<String> semList = md52semantic.get(md5TypeDef);
        Assert.assertNotNull(semList);
        Assert.assertEquals(2, semList.size());
        for (String semText : semList) {
            TypeDefId semTypeDef = TypeDefId.fromTypeString(semText);
            Assert.assertEquals(typeDefName, semTypeDef.getType().getTypeString());
            String verText = semTypeDef.getVerString();
            Assert.assertTrue("0.1".equals(verText) || "1.0".equals(verText));
        }
    }

    @Test
    public void testListModules() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        Map<String,String> moduleNamesInList = new HashMap<String,String>();
        for (String mod : ws.listModules(null)) {
            moduleNamesInList.put(mod, "");
        }
        Assert.assertTrue(moduleNamesInList.containsKey("SomeModule"));
        Assert.assertTrue(moduleNamesInList.containsKey("TestModule"));
    }

    @Test
    public void testListModuleVersions() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        Assert.assertEquals(3, ws.getModuleVersions("SomeModule", null).size());
        Assert.assertEquals(4, ws.getModuleVersions("SomeModule", new WorkspaceUser("foo")).size());
        Assert.assertEquals(2, ws.getModuleVersions("TestModule", null).size());
        Assert.assertEquals(5, ws.getModuleVersions("TestModule", new WorkspaceUser("foo")).size());
    }

    @Test
    public void testGetModuleInfo() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        ModuleInfo m = ws.getModuleInfo(null, new ModuleDefId("TestModule"));
        Assert.assertTrue(m.isReleased());
        Map<String,String> funcNamesInList = new HashMap<String,String>();
        for (String func : m.getFunctions()) {
            funcNamesInList.put(func, "");
        }
        Assert.assertTrue(funcNamesInList.containsKey("TestModule.getFeature-2.0"));
        Assert.assertTrue(funcNamesInList.containsKey("TestModule.getGenome-1.0"));
        Map<String,String> typeNamesInList = new HashMap<String,String>();
        for (Entry<AbsoluteTypeDefId, String> type : m.getTypes().entrySet()) {
            typeNamesInList.put(type.getKey().getTypeString(), "");
        }
        Assert.assertTrue(typeNamesInList.containsKey("TestModule.Genome-2.0"));
        Assert.assertTrue(typeNamesInList.containsKey("TestModule.Feature-1.0"));
        try {
            ws.getModuleInfo(null, new ModuleDefId("MadeUpModuleThatIsNotThere"));
            fail("getModuleInfo of a nonexistent module should throw a NoSuchModuleException");
        } catch (NoSuchModuleException e) {}
        ModuleInfo m2 = ws.getModuleInfo(new WorkspaceUser("foo"), new ModuleDefId("UnreleasedModule"));
        Assert.assertEquals("foo", m2.getOwners().get(0));
        Assert.assertFalse(m2.isReleased());
        List<Long> verList = ws.getModuleVersions("UnreleasedModule", new WorkspaceUser("foo"));
        Assert.assertEquals(1, verList.size());
        Assert.assertEquals(m2.getVersion(), verList.get(0));
    }

    @Test
    public void testGetJsonSchema() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        try {
            ws.getJsonSchema(new TypeDefId("TestModule.NonExistantType"), null);
            fail("getJsonSchema of a nonexistent type should throw a NoSuchTypeException");
        } catch (NoSuchTypeException e) {}
        // get several different schemas, make sure that no exceptions are thrown and it is valid json!
        String schema = ws.getJsonSchema(new TypeDefId(new TypeDefName("TestModule.Genome"), 2, 0), null);
        ObjectMapper mapper = new ObjectMapper();
        JsonNode schemaNode = mapper.readTree(schema);
        Assert.assertEquals("Genome", schemaNode.get("id").asText());
        schema = ws.getJsonSchema(new TypeDefId(new TypeDefName("TestModule.Genome"), 2), null);
        schemaNode = mapper.readTree(schema);
        Assert.assertEquals("Genome", schemaNode.get("id").asText());
        schema = ws.getJsonSchema(new TypeDefId("TestModule.Genome"), null);
        schemaNode = mapper.readTree(schema);
        Assert.assertEquals("Genome", schemaNode.get("id").asText());
    }

    @Test
    public void testGetTypeInfo() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        TypeDetailedInfo info = ws.getTypeInfo("TestModule.Genome", false, null);
        Assert.assertEquals("TestModule.Genome-2.0", info.getTypeDefId());
        Assert.assertEquals(1, info.getReleasedModuleVersions().size());
        Assert.assertEquals(2, info.getReleasedTypeVersions().size());
        info = ws.getTypeInfo("TestModule.Feature", false, null);
        Assert.assertEquals("TestModule.Feature-1.0", info.getTypeDefId());
        Assert.assertEquals(2, info.getReleasedModuleVersions().size());
        Assert.assertEquals(1, info.getReleasedTypeVersions().size());
        TypeDetailedInfo info2 = ws.getTypeInfo("UnreleasedModule.AType-0.1", false, new WorkspaceUser("foo"));
        Assert.assertEquals(1, info2.getUsingFuncDefIds().size());
        Assert.assertEquals(1, info2.getModuleVersions().size());
        Assert.assertEquals(1, info2.getTypeVersions().size());
        Assert.assertEquals(0, info2.getReleasedModuleVersions().size());
        Assert.assertEquals(0, info2.getReleasedTypeVersions().size());
        Assert.assertTrue(info2.getJsonSchema().contains("kidl-structure"));
        Assert.assertTrue(info2.getParsingStructure().contains("Bio::KBase::KIDL::KBT::Typedef"));
    }

    @Test
    public void testGetFuncInfo() throws Exception {
        //see setUpWorkspaces() to find where needed specs are loaded
        try {
            ws.getFuncInfo("NoModuleThatExists.getFeature", false, null);
            fail("getFuncInfo of a nonexistent module should throw a NoSuchModuleException");
        } catch (NoSuchModuleException e) {}
        try {
            ws.getFuncInfo("TestModule.noFunctionThatIKnowOf", false, null);
            fail("getFuncInfo of a nonexistent function should throw a NoSuchFuncException");
        } catch (NoSuchFuncException e) {}
        FuncDetailedInfo info = ws.getFuncInfo("TestModule.getFeature", false, null);
        Assert.assertEquals("TestModule.getFeature-2.0", info.getFuncDefId());
        Assert.assertEquals(1, info.getReleasedModuleVersions().size());
        Assert.assertEquals(2, info.getReleasedFuncVersions().size());
        info = ws.getFuncInfo("TestModule.getGenome-1.0", false, null);
        Assert.assertEquals("TestModule.getGenome-1.0", info.getFuncDefId());
        Assert.assertEquals(1, info.getReleasedModuleVersions().size());
        Assert.assertEquals(1, info.getReleasedFuncVersions().size());
        FuncDetailedInfo info2 = ws.getFuncInfo("UnreleasedModule.aFunc-0.1", false, new WorkspaceUser("foo"));
        Assert.assertEquals(1, info2.getUsedTypeDefIds().size());
        Assert.assertEquals(1, info2.getModuleVersions().size());
        Assert.assertEquals(1, info2.getFuncVersions().size());
        Assert.assertEquals(0, info2.getReleasedModuleVersions().size());
        Assert.assertEquals(0, info2.getReleasedFuncVersions().size());
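        // the KIDL parsing structure of the unreleased function should still be visible to its owner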
        Assert.assertTrue(info2.getParsingStructure().contains("Bio::KBase::KIDL::KBT::Funcdef"));
    }

    private void setUpCopyWorkspaces(WorkspaceUser user1, WorkspaceUser user2,
            String refws, String ws1, String ws2) throws Exception {
        TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0);
        WorkspaceIdentifier refs = new WorkspaceIdentifier(refws);
        ws.createWorkspace(user1, refs.getName(), false, null, null);
        LinkedList<WorkspaceSaveObject> refobjs = new LinkedList<WorkspaceSaveObject>();
        for (int i = 0; i < 4; i++) {
            refobjs.add(new WorkspaceSaveObject(new HashMap<String, String>(),
                    SAFE_TYPE1, null, new Provenance(user1), false));
        }
        ws.saveObjects(user1, refs, refobjs, getIdFactory());
        List<WorkspaceSaveObject> wso = Arrays.asList(new WorkspaceSaveObject(
                new ObjectIDNoWSNoVer("auto2"), new HashMap<String, String>(),
                SAFE_TYPE1, null, new Provenance(user1), false));
        ws.saveObjects(user1, refs, wso, getIdFactory());
        ws.saveObjects(user1, refs, wso, getIdFactory());
        Map<String, String> meta1 = makeSimpleMeta("foo", "bar");
        Map<String, String> meta2 = makeSimpleMeta("foo", "baz");
        Map<String, String> meta3 = makeSimpleMeta("foo", "bak");
        Map<String, List<String>> data1 = makeRefData(refws + "/auto2/2");
        Map<String, List<String>> data2 = makeRefData(refws + "/auto4");
        Map<String, List<String>> data3 = makeRefData(refws + "/auto1");
        Provenance prov1 = new Provenance(user1);
        prov1.addAction(new ProvenanceAction()
                .withCommandLine("A command line")
                .withDescription("descrip")
                .withIncomingArgs(Arrays.asList("a", "b", "c"))
                .withMethod("method")
                .withMethodParameters(Arrays.asList((Object) meta1))
                .withOutgoingArgs(Arrays.asList("d", "e", "f"))
                .withScript("script")
                .withScriptVersion("2.1")
                .withServiceName("service")
                .withServiceVersion("3")
                .withTime(new Date(45))
                .withWorkspaceObjects(Arrays.asList(refws + "/auto3", refws + "/auto2/2")));
        prov1.addAction(new ProvenanceAction()
                .withWorkspaceObjects(Arrays.asList(refws + "/auto2/1", refws + "/auto1")));
        Provenance prov2 = new Provenance(user1);
        Provenance prov3 = new Provenance(user1);
        prov2.addAction(new ProvenanceAction(prov1.getActions().get(0)).withServiceVersion("4")
                .withWorkspaceObjects(Arrays.asList(refws + "/auto2")));
        prov3.addAction(new ProvenanceAction(prov1.getActions().get(0)).withServiceVersion("5")
                .withWorkspaceObjects(Arrays.asList(refws + "/auto3/1")));
        WorkspaceIdentifier cp1 = new WorkspaceIdentifier(ws1);
        WorkspaceIdentifier cp2 = new WorkspaceIdentifier(ws2);
        ws.createWorkspace(user1, cp1.getName(), false, null, null).getId();
        ws.createWorkspace(user2, cp2.getName(), false, null, null).getId();
        saveObject(user1, cp1, meta1, data1, reftype, "hide", prov1, true);
        saveObject(user1, cp1, meta2, data2, reftype, "hide", prov2, true);
        saveObject(user1, cp1, meta3, data3, reftype, "hide", prov2, true);
        saveObject(user1, cp1, meta1, data1, reftype, "orig", prov1);
        saveObject(user1, cp1, meta2, data2, reftype, "orig", prov2);
        saveObject(user1, cp1, meta3, data3, reftype, "orig", prov3);
        saveObject(user1, cp1, meta1, data1, reftype, "hidetarget", prov1, true);
    }

    @Test
    public void copyRevert() throws Exception {
        WorkspaceUser user1 = new WorkspaceUser("foo");
        WorkspaceUser user2 = new WorkspaceUser("bar");
        String wsrefs = "copyrevertrefs";
        String ws1 = "copyrevert1";
        String ws2 = "copyrevert2";
        setUpCopyWorkspaces(user1, user2, wsrefs, ws1, ws2);
        WorkspaceIdentifier cp1 = new WorkspaceIdentifier(ws1);
        WorkspaceIdentifier cp2 = new WorkspaceIdentifier(ws2);
        WorkspaceInformation cp1info = ws.getWorkspaceInformation(user1, cp1);
        WorkspaceInformation cp2info = ws.getWorkspaceInformation(user2, cp2);
        long wsid1 = cp1info.getId();
        long wsid2 = cp2info.getId();
        Date cp1LastDate = cp1info.getModDate();
        Date cp2LastDate = cp2info.getModDate();
        ObjectIdentifier oihide = new ObjectIdentifier(cp1, "hide");
        List<ObjectInformation> objs = ws.getObjectHistory(user1, oihide);
        ObjectInformation save11 = objs.get(0);
        ObjectInformation save12 = objs.get(1);
        ObjectInformation save13 = objs.get(2);
        WorkspaceObjectData wod = ws.getObjects(user1, Arrays.asList(oihide)).get(0);
        WorkspaceObjectData swod = ws.getObjectsSubSet(user1,
                objIDToSubObjID(Arrays.asList(oihide))).get(0);
        WorkspaceObjectInformation woi = ws.getObjectProvenance(user1,
                Arrays.asList(oihide)).get(0);
        assertThat("copy ref for obj is null", wod.getCopyReference(), is((Reference) null));
        assertThat("copy ref for sub obj is null", swod.getCopyReference(), is((Reference) null));
        assertThat("copy ref for prov is null", woi.getCopyReference(), is((Reference) null));
        //copy entire stack of hidden objects
        cp1LastDate = ws.getWorkspaceInformation(user1, cp1).getModDate();
        ObjectInformation copied = ws.copyObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/hide"),
                ObjectIdentifier.parseObjectReference("copyrevert1/copyhide"));
        cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate,
                "ws date updated on copy");
        compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 4, "copyhide", 3);
        List<ObjectInformation> copystack = ws.getObjectHistory(user1,
                new ObjectIdentifier(cp1, 4));
        compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 4, "copyhide", 1);
        compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 4, "copyhide", 2);
        compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 4, "copyhide", 3);
        checkUnhiddenObjectCount(user1, cp1, 6, 10);
        objs = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "orig"));
        save11 = objs.get(0);
        save12 = objs.get(1);
        save13 = objs.get(2);
        //copy stack of unhidden objects
        copied = ws.copyObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/orig"),
                ObjectIdentifier.parseObjectReference("copyrevert1/copied"));
        cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate,
                "ws date updated on copy");
        compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 5, "copied", 3);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied"));
        compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1);
        compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2);
        compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3);
        checkUnhiddenObjectCount(user1, cp1, 9, 13);
        //copy visible object to pre-existing hidden object
        copied = ws.copyObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/orig"),
                new ObjectIdentifier(cp1, "hidetarget"));
        cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate,
                "ws date updated on copy");
        compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 3, "hidetarget", 2);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, 3));
        //0 is original object
        compareObjectAndInfo(save13, copystack.get(1), user1, wsid1, cp1.getName(), 3, "hidetarget", 2);
        checkUnhiddenObjectCount(user1, cp1, 9, 14);
        //copy hidden object to pre-existing visible object
        //check that the to version is ignored
        copied = ws.copyObject(user1, new ObjectIdentifier(cp1, "orig"),
                new ObjectIdentifier(cp1, 5, 600));
        compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 5, "copied", 4);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, 5));
        compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4);
        checkUnhiddenObjectCount(user1, cp1, 10, 15);
        //copy specific version to existing object
        copied = ws.copyObject(user1,
                new ObjectIdentifier(new WorkspaceIdentifier(wsid1), 2, 2),
                ObjectIdentifier.parseObjectReference("copyrevert1/copied"));
        compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 5, "copied", 5);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied"));
        compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1);
        compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2);
        compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3);
        compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4);
        compareObjectAndInfo(save12, copystack.get(4), user1, wsid1, cp1.getName(), 5, "copied", 5);
        checkUnhiddenObjectCount(user1, cp1, 11, 16);
        //copy specific version to hidden existing object
        copied = ws.copyObject(user1,
                new ObjectIdentifier(new WorkspaceIdentifier(wsid1), 2, 2),
                ObjectIdentifier.parseObjectReference("copyrevert1/hidetarget"));
        compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 3, "hidetarget", 3);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "hidetarget"));
        //0 is original object
        compareObjectAndInfo(save13, copystack.get(1), user1, wsid1, cp1.getName(), 3, "hidetarget", 2);
        compareObjectAndInfo(save12, copystack.get(2), user1, wsid1, cp1.getName(), 3, "hidetarget", 3);
        checkUnhiddenObjectCount(user1, cp1, 11, 17);
        //copy specific version to new object
        copied = ws.copyObject(user1,
                new ObjectIdentifier(new WorkspaceIdentifier(wsid1), 2, 2),
                ObjectIdentifier.parseObjectReference("copyrevert1/newobj"));
        compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 6, "newobj", 1);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "newobj"));
        compareObjectAndInfo(save12, copystack.get(0), user1, wsid1, cp1.getName(), 6, "newobj", 1);
        checkUnhiddenObjectCount(user1, cp1, 12, 18);
        //revert normal object
        cp1LastDate = ws.getWorkspaceInformation(user1, cp1).getModDate();
        copied = ws.revertObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/copied/2"));
        cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate,
                "ws date updated on revert");
        compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 5, "copied", 6);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied"));
        compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1);
        compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2);
        compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3);
        compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4);
        compareObjectAndInfo(save12, copystack.get(4), user1, wsid1, cp1.getName(), 5, "copied", 5);
        compareObjectAndInfo(save12, copystack.get(5), user1, wsid1, cp1.getName(), 5, "copied", 6);
        checkUnhiddenObjectCount(user1, cp1, 13, 19);
        //revert hidden object
        copied = ws.revertObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/hidetarget/2"));
        cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate,
                "ws date updated on revert");
        compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 3, "hidetarget", 4);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "hidetarget"));
        //0 is original object
        compareObjectAndInfo(save13, copystack.get(1), user1, wsid1, cp1.getName(), 3, "hidetarget", 2);
        compareObjectAndInfo(save12, copystack.get(2), user1, wsid1, cp1.getName(), 3, "hidetarget", 3);
        compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 3, "hidetarget", 4);
        checkUnhiddenObjectCount(user1, cp1, 13, 20);
        //copy to new ws
        ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.WRITE);
        cp2LastDate = ws.getWorkspaceInformation(user1, cp2).getModDate();
        copied = ws.copyObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/orig"),
                ObjectIdentifier.parseObjectReference("copyrevert2/copied"));
        cp2LastDate = assertWorkspaceDateUpdated(user1, cp2, cp2LastDate,
                "ws date updated on copy");
        compareObjectAndInfo(save13, copied, user1, wsid2, cp2.getName(), 1, "copied", 3);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp2, "copied"));
        compareObjectAndInfo(save11, copystack.get(0), user1, wsid2, cp2.getName(), 1, "copied", 1);
        compareObjectAndInfo(save12, copystack.get(1), user1, wsid2, cp2.getName(), 1, "copied", 2);
        compareObjectAndInfo(save13, copystack.get(2), user1, wsid2, cp2.getName(), 1, "copied", 3);
        checkUnhiddenObjectCount(user1, cp2, 3, 3);
        checkUnhiddenObjectCount(user1, cp1, 13, 20);
        //copy to deleted object
        ws.setObjectsDeleted(user1, Arrays.asList(
                ObjectIdentifier.parseObjectReference("copyrevert1/copied")), true);
        copied = ws.copyObject(user1,
                ObjectIdentifier.parseObjectReference("copyrevert1/orig"),
                ObjectIdentifier.parseObjectReference("copyrevert1/copied"));
        compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 5, "copied", 7);
        copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied"));
        compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1);
        compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2);
        compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3);
        compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4);
        compareObjectAndInfo(save12, copystack.get(4), user1, wsid1, cp1.getName(), 5, "copied", 5);
        compareObjectAndInfo(save12, copystack.get(5), user1, wsid1, cp1.getName(), 5, "copied", 6);
        compareObjectAndInfo(save13, copystack.get(6), user1, wsid1, cp1.getName(), 5, "copied", 7);
        checkUnhiddenObjectCount(user1, cp1, 14, 21);
        failCopy(null, new ObjectIdentifier(cp1, "whooga"),
                new ObjectIdentifier(cp1, "hidetarget"), new InaccessibleObjectException(
                        "Object whooga cannot be accessed: Anonymous users may not read workspace copyrevert1"));
        failRevert(null, new ObjectIdentifier(cp1, "whooga"), new InaccessibleObjectException(
                "Object whooga cannot be accessed: Anonymous users may not write to workspace copyrevert1"));
        failCopy(user1, new ObjectIdentifier(cp1, "foo"),
                new ObjectIdentifier(cp1, "bar"), new NoSuchObjectException(
                        "No object with name foo exists in workspace " + wsid1));
        failRevert(user1, new ObjectIdentifier(cp1, "foo"), new NoSuchObjectException(
                "No object with name foo exists in workspace " + wsid1));
        failRevert(user1, new ObjectIdentifier(cp1, "orig", 4), new NoSuchObjectException(
                "No object with id 2 (name orig) and version 4 exists in workspace " + wsid1));
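        // destination object ids must already exist, and deleted objects may be
        // neither copied nor reverted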
        failCopy(user1, new ObjectIdentifier(cp1, "orig"),
                new ObjectIdentifier(cp1, 7), new NoSuchObjectException(
                        "Copy destination is specified as object id 7 in workspace " + wsid1 +
                        " which does not exist."));
        ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(cp1, "copied")), true);
        failCopy(user1, new ObjectIdentifier(cp1, "copied"),
                new ObjectIdentifier(cp1, "hidetarget"), new NoSuchObjectException(
                        "Object 5 (name copied) in workspace " + wsid1 + " has been deleted"));
        failRevert(user1, new ObjectIdentifier(cp1, "copied"), new NoSuchObjectException(
                "Object 5 (name copied) in workspace " + wsid1 + " has been deleted"));
        //now works
//        failCopy(user1, new ObjectIdentifier(cp1, "orig"),
//                new ObjectIdentifier(cp1, "copied"), new NoSuchObjectException(
//                        "Object 5 (name copied) in workspace " + wsid1 + " has been deleted"));
        cp2LastDate = ws.getWorkspaceInformation(user1, cp2).getModDate();
        ws.copyObject(user1, new ObjectIdentifier(cp1, "orig"),
                new ObjectIdentifier(cp2, "foo")); //should work
        cp2LastDate = assertWorkspaceDateUpdated(user1, cp2, cp2LastDate,
                "ws date updated on copy");
        ws.setWorkspaceDeleted(user2, cp2, true);
        failCopy(user1, new ObjectIdentifier(cp1, "orig"), new ObjectIdentifier(cp2, "foo1"),
                new InaccessibleObjectException(
                        "Object foo1 cannot be accessed: Workspace copyrevert2 is deleted"));
        failCopy(user1, new ObjectIdentifier(cp2, "foo"), new ObjectIdentifier(cp2, "foo1"),
                new InaccessibleObjectException(
                        "Object foo cannot be accessed: Workspace copyrevert2 is deleted"));
        failRevert(user1, new ObjectIdentifier(cp2, "foo"),
                new InaccessibleObjectException(
                        "Object foo cannot be accessed: Workspace copyrevert2 is deleted"));
        ws.setWorkspaceDeleted(user2, cp2, false);
        ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.READ);
        ws.copyObject(user1, new ObjectIdentifier(cp2, "foo"),
                new ObjectIdentifier(cp1, "foo")); //should work
        failCopy(user1, new ObjectIdentifier(cp1, "foo"), new ObjectIdentifier(cp2, "foo"),
                new InaccessibleObjectException(
                        "Object foo cannot be accessed: User foo may not write to workspace copyrevert2"));
        failRevert(user1, new ObjectIdentifier(cp2, "foo", 1),
                new InaccessibleObjectException(
                        "Object foo cannot be accessed: User foo may not write to workspace copyrevert2"));
        ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.NONE);
        failCopy(user1, new ObjectIdentifier(cp2, "foo"), new ObjectIdentifier(cp1, "foo"),
                new InaccessibleObjectException(
                        "Object foo cannot be accessed: User foo may not read workspace copyrevert2"));
        ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.WRITE);
        ws.lockWorkspace(user2, cp2);
        failCopy(user1, new ObjectIdentifier(cp1, "orig"), new ObjectIdentifier(cp2, "foo2"),
                new InaccessibleObjectException(
                        "Object foo2 cannot be accessed: The workspace with id " + wsid2 +
                        ", name copyrevert2, is locked and may not be modified"));
        failRevert(user1, new ObjectIdentifier(cp2, "foo1", 1),
                new InaccessibleObjectException(
                        "Object foo1 cannot be accessed: The workspace with id " + wsid2 +
                        ", name copyrevert2, is locked and may not be modified"));
    }

    private void checkUnhiddenObjectCount(WorkspaceUser user,
            WorkspaceIdentifier wsi, int unhidden, int all) throws Exception {
        ListObjectsParameters lop = new ListObjectsParameters(user, Arrays.asList(wsi))
                .withShowAllVersions(true);
        List<ObjectInformation> objs = ws.listObjects(lop);
        assertThat("hidden objects excluded from list", objs.size(), is(unhidden));
        lop.withShowHidden(true);
        objs = ws.listObjects(lop);
        assertThat("all objects listed when showing hidden", objs.size(), is(all));
    }

    @Test
    public void copyReferenceVisibility() throws Exception {
        WorkspaceUser user1 = new WorkspaceUser("foo");
        WorkspaceUser user2 = new WorkspaceUser("foo2");
        WorkspaceIdentifier wsiSource1 = new WorkspaceIdentifier("copyRefVisSource1");
        WorkspaceIdentifier wsiSource2 = new WorkspaceIdentifier("copyRefVisSource2");
        WorkspaceIdentifier wsiCopied = new WorkspaceIdentifier("copyRefVisCopied");
        long wsid1 = ws.createWorkspace(user1, wsiSource1.getName(), false, null, null).getId();
        ws.setPermissions(user1, wsiSource1, Arrays.asList(user2), Permission.READ);
        long wsid2 = ws.createWorkspace(user1, wsiSource2.getName(), false, null, null).getId();
        ws.setPermissions(user1, wsiSource2, Arrays.asList(user2), Permission.READ);
        ws.createWorkspace(user2, wsiCopied.getName(), false, null, null);
        Provenance emptyprov1 = new Provenance(user1);
        Provenance emptyprov2 = new Provenance(user2);
        List<WorkspaceSaveObject> data = new LinkedList<WorkspaceSaveObject>();
        data.add(new WorkspaceSaveObject(new HashMap<String, Object>(),
                SAFE_TYPE1, null, emptyprov1, false));
        ws.saveObjects(user1, wsiSource1, data, new IdReferenceHandlerSetFactory(0));
        ws.saveObjects(user1, wsiSource2, data, new IdReferenceHandlerSetFactory(0));
        final ObjectIdentifier source1 = new ObjectIdentifier(wsiSource1, 1);
        final ObjectIdentifier source2 = new ObjectIdentifier(wsiSource2, 1);
        final ObjectIdentifier copied1 = new ObjectIdentifier(wsiCopied, "foo");
        final ObjectIdentifier copied2 = new ObjectIdentifier(wsiCopied, "foo1");
        ws.copyObject(user2, source1, copied1);
        ws.copyObject(user2, source2, copied2);
        ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(0));
        final ObjectIdentifier nocopy = new ObjectIdentifier(wsiCopied, 3L);
        data.clear();
        Map<String, Object> ref = new HashMap<String, Object>();
        ref.put("refs", Arrays.asList(wsiCopied.getName() + "/foo"));
        data.add(new WorkspaceSaveObject(ref, REF_TYPE, null, emptyprov2, false));
        ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(1));
        ObjectChain copyoc1 = new ObjectChain(new ObjectIdentifier(wsiCopied, 4L),
                Arrays.asList(copied1));
        ref.put("refs", Arrays.asList(wsiCopied.getName() + "/foo1"));
        ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(1));
        ObjectChain copyoc2 = new ObjectChain(new ObjectIdentifier(wsiCopied, 5L),
                Arrays.asList(copied2));
        ref.put("refs", Arrays.asList(wsiCopied.getName() + "/3"));
        ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(1));
        ObjectChain nocopyoc = new ObjectChain(new ObjectIdentifier(wsiCopied, 6L),
                Arrays.asList(nocopy));
        final TestReference expectedRef1 = new TestReference(wsid1, 1, 1);
        final TestReference expectedRef2 = new TestReference(wsid2, 1, 1);
        List<ObjectIdentifier> testobjs = Arrays.asList(copied1, nocopy, copied2);
        List<ObjectChain> testocs = Arrays.asList(copyoc1, nocopyoc, copyoc2);
        List<TestReference> refnullref = Arrays.asList(
                expectedRef1, (TestReference) null, expectedRef2);
        List<TestReference> nullnullref = Arrays.asList(
                (TestReference) null, (TestReference) null, expectedRef2);
        List<TestReference> refnullnull = Arrays.asList(
                expectedRef1, (TestReference) null, (TestReference) null);
        List<Boolean> fff = Arrays.asList(false, false, false);
        List<Boolean> tff = Arrays.asList(true, false, false);
        List<Boolean> fft = Arrays.asList(false, false, true);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
        //check 1st ref
        ws.setPermissions(user1, wsiSource1, Arrays.asList(user2), Permission.NONE);
        checkCopyReference(user2, testobjs, testocs, nullnullref, tff);
        ws.setPermissions(user1, wsiSource1, Arrays.asList(user2), Permission.READ);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
        ws.setObjectsDeleted(user1, Arrays.asList(source1), true);
        checkCopyReference(user2, testobjs, testocs, nullnullref, tff);
        ws.setObjectsDeleted(user1, Arrays.asList(source1), false);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
        ws.setWorkspaceDeleted(user1, wsiSource1, true);
        checkCopyReference(user2, testobjs, testocs, nullnullref, tff);
        ws.setWorkspaceDeleted(user1, wsiSource1, false);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
        //check 2nd ref
        ws.setPermissions(user1, wsiSource2, Arrays.asList(user2), Permission.NONE);
        checkCopyReference(user2, testobjs, testocs, refnullnull, fft);
        ws.setPermissions(user1, wsiSource2, Arrays.asList(user2), Permission.READ);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
        ws.setObjectsDeleted(user1, Arrays.asList(source2), true);
        checkCopyReference(user2, testobjs, testocs, refnullnull, fft);
        ws.setObjectsDeleted(user1, Arrays.asList(source2), false);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
        ws.setWorkspaceDeleted(user1, wsiSource2, true);
        checkCopyReference(user2, testobjs, testocs, refnullnull, fft);
        ws.setWorkspaceDeleted(user1, wsiSource2, false);
        checkCopyReference(user2, testobjs, testocs, refnullref, fff);
    }

    private void checkCopyReference(WorkspaceUser user, List<ObjectIdentifier> testobjs,
            List<ObjectChain> testocs, List<TestReference> testRef,
            List<Boolean> copyAccessible) throws Exception {
        List<List<WorkspaceObjectInformation>> infos =
                new LinkedList<List<WorkspaceObjectInformation>>();
        infos.add(ws.getObjectProvenance(user, testobjs));
        infos.add(fromObjectData(ws.getObjects(user, testobjs)));
        infos.add(fromObjectData(ws.getObjectsSubSet(user, objIDToSubObjID(testobjs))));
        infos.add(fromObjectData(ws.getReferencedObjects(user, testocs)));
        for (List<WorkspaceObjectInformation> info : infos) {
            for (int i = 0; i < info.size(); i++) {
                WorkspaceObjectInformation inf = info.get(i);
                assertThat("correct reference", inf.getCopyReference() == null ?
                        null : new TestReference(inf.getCopyReference()),
                        is(testRef.get(i)));
                assertThat("correct inaccessibility", inf.isCopySourceInaccessible(),
                        is(copyAccessible.get(i)));
            }
        }
    }

    private List<WorkspaceObjectInformation> fromObjectData(
            List<WorkspaceObjectData> data) {
        List<WorkspaceObjectInformation> ret = new LinkedList<WorkspaceObjectInformation>();
        for (WorkspaceObjectData d : data) {
            ret.add((WorkspaceObjectInformation) d);
        }
        return ret;
    }

    @Test
    public void cloneWorkspace() throws Exception {
        WorkspaceUser user1 = new WorkspaceUser("foo");
        WorkspaceUser user2 = new WorkspaceUser("bar");
        String wsrefs = "clonerefs";
        String ws1 = "clone1";
        setUpCopyWorkspaces(user1, user2, wsrefs, ws1, "cloneunused");
        WorkspaceIdentifier cp1 = new WorkspaceIdentifier(ws1);
        WorkspaceIdentifier clone1 = new WorkspaceIdentifier("newclone");
        Map<String, String> premeta = new HashMap<String, String>();
        premeta.put("clone", "workspace");
        WorkspaceUserMetadata meta = new WorkspaceUserMetadata(premeta);
        WorkspaceInformation info1 = ws.cloneWorkspace(user1, cp1, clone1.getName(),
                false, null, meta);
        checkWSInfo(clone1, user1, "newclone", 3, Permission.OWNER, false,
                info1.getId(), info1.getModDate(), "unlocked", premeta);
        assertNull("desc ok", ws.getWorkspaceDescription(user1, clone1));
        List<ObjectInformation> objs = ws.getObjectHistory(user1,
                new ObjectIdentifier(cp1, "hide"));
        ObjectInformation save11 = objs.get(0);
        ObjectInformation save12 = objs.get(1);
        ObjectInformation save13 = objs.get(2);
        objs = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "orig"));
        ObjectInformation save21 = objs.get(0);
        ObjectInformation save22 = objs.get(1);
        ObjectInformation save23 = objs.get(2);
        objs = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "hidetarget"));
        ObjectInformation save31 = objs.get(0);
        List<ObjectInformation> hideobjs = ws.getObjectHistory(user1,
                new ObjectIdentifier(clone1, "hide"));
        long id = hideobjs.get(0).getObjectId();
        compareObjectAndInfo(save11, hideobjs.get(0), user1, info1.getId(),
                clone1.getName(), id, "hide", 1);
        compareObjectAndInfo(save12, hideobjs.get(1), user1, info1.getId(),
                clone1.getName(), id, "hide", 2);
        compareObjectAndInfo(save13, hideobjs.get(2), user1, info1.getId(),
                clone1.getName(), id, "hide", 3);
        List<ObjectInformation> origobjs = ws.getObjectHistory(user1,
                new ObjectIdentifier(clone1, "orig"));
        id = origobjs.get(0).getObjectId();
        compareObjectAndInfo(save21, origobjs.get(0), user1, info1.getId(),
                clone1.getName(), id, "orig", 1);
        compareObjectAndInfo(save22, origobjs.get(1), user1, info1.getId(),
                clone1.getName(), id, "orig", 2);
        compareObjectAndInfo(save23, origobjs.get(2), user1, info1.getId(),
                clone1.getName(), id, "orig", 3);
        List<ObjectInformation> hidetarget = ws.getObjectHistory(user1,
                new ObjectIdentifier(clone1, "hidetarget"));
        id = hidetarget.get(0).getObjectId();
        compareObjectAndInfo(save31, hidetarget.get(0), user1, info1.getId(),
                clone1.getName(), id, "hidetarget", 1);
        checkUnhiddenObjectCount(user1, clone1, 3, 7);
        ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(cp1, "hide")), true);
        WorkspaceIdentifier clone2 = new WorkspaceIdentifier("newclone2");
        WorkspaceInformation info2 = ws.cloneWorkspace(user1, cp1, clone2.getName(),
                true, "my desc", null);
        checkWSInfo(clone2, user1, "newclone2", 2, Permission.OWNER, true,
                info2.getId(), info2.getModDate(), "unlocked", MT_META);
        assertThat("desc ok", ws.getWorkspaceDescription(user1, clone2), is("my desc"));
        origobjs = ws.getObjectHistory(user1, new ObjectIdentifier(clone2, "orig"));
        id = origobjs.get(0).getObjectId();
        compareObjectAndInfo(save21, origobjs.get(0), user1, info2.getId(),
                clone2.getName(), id, "orig", 1);
        compareObjectAndInfo(save22, origobjs.get(1), user1, info2.getId(),
                clone2.getName(), id, "orig", 2);
        compareObjectAndInfo(save23, origobjs.get(2), user1, info2.getId(),
                clone2.getName(), id, "orig", 3);
        hidetarget = ws.getObjectHistory(user1, new ObjectIdentifier(clone2, "hidetarget"));
        id = hidetarget.get(0).getObjectId();
        compareObjectAndInfo(save31, hidetarget.get(0), user1, info2.getId(),
                clone2.getName(), id, "hidetarget", 1);
        checkUnhiddenObjectCount(user1, clone2, 3, 4);
        ws.setWorkspaceDeleted(user1, cp1, true);
        failClone(user1, cp1, "fakename", null,
                new NoSuchWorkspaceException("Workspace clone1 is deleted", cp1));
        ws.setWorkspaceDeleted(user1, cp1, false);
        ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(cp1, "hide")), true);
        failClone(null, cp1, "fakename", null,
                new WorkspaceAuthorizationException("Anonymous users may not read workspace clone1"));
        failClone(user1, null, "fakename", null,
                new IllegalArgumentException("Workspace identifier cannot be null"));
        //workspace name checking is tested extensively elsewhere, so just a couple of tests here
        failClone(user1, cp1, "bar:fakename", null, new IllegalArgumentException(
                "Workspace name bar:fakename must only contain the user name foo prior to the : delimiter"));
        failClone(user1, cp1, "9", null, new IllegalArgumentException(
                "Workspace names cannot be integers: 9"));
        failClone(user1, cp1, "foo:9", null, new IllegalArgumentException(
                "Workspace names cannot be integers: foo:9"));
        failClone(user1, cp1, "foo:fake(name", null, new IllegalArgumentException(
                "Illegal character in workspace name foo:fake(name: ("));
        failClone(user2, cp1, "fakename", null,
                new WorkspaceAuthorizationException("User bar may not read workspace clone1"));
        failClone(user1, cp1, "newclone2", null, new PreExistingWorkspaceException(
                "Workspace name newclone2 is already in use"));
        failClone(user1, new WorkspaceIdentifier("noclone"), "fakename", null,
                new NoSuchWorkspaceException("No workspace with name noclone exists", cp1));
        ws.lockWorkspace(user1, cp1);
        WorkspaceIdentifier clone3 = new WorkspaceIdentifier("newclone3");
        WorkspaceInformation info3 = ws.cloneWorkspace(user1, cp1, clone3.getName(),
                false, "my desc2", meta);
        checkWSInfo(clone3, user1, "newclone3", 2, Permission.OWNER, false,
                info3.getId(), info3.getModDate(), "unlocked", premeta);
        assertThat("desc ok", ws.getWorkspaceDescription(user1, clone3), is("my desc2"));
        origobjs = ws.getObjectHistory(user1, new ObjectIdentifier(clone3, "orig"));
        id = origobjs.get(0).getObjectId();
        compareObjectAndInfo(save21, origobjs.get(0), user1, info3.getId(),
                clone3.getName(), id, "orig", 1);
        compareObjectAndInfo(save22, origobjs.get(1), user1, info3.getId(),
                clone3.getName(), id, "orig", 2);
        compareObjectAndInfo(save23, origobjs.get(2), user1, info3.getId(),
                clone3.getName(), id, "orig", 3);
        hidetarget = ws.getObjectHistory(user1, new ObjectIdentifier(clone3, "hidetarget"));
        id = hidetarget.get(0).getObjectId();
        compareObjectAndInfo(save31, hidetarget.get(0), user1, info3.getId(),
                clone3.getName(), id, "hidetarget", 1);
        checkUnhiddenObjectCount(user1, clone3, 3, 4);
        WorkspaceIdentifier clone4 = new WorkspaceIdentifier("newclone4");
        ws.cloneWorkspace(user1, cp1, clone4.getName(), true, LONG_TEXT, null);
        assertThat("desc ok", ws.getWorkspaceDescription(user1, clone4),
                is(LONG_TEXT.subSequence(0, 1000)));
        //TODO BF this test should go in metadata class unit tests
        /*
        Map<String, String> bigmeta = new HashMap<String, String>();
        for (int i = 0; i < 141; i++) {
            bigmeta.put("thing" + i, TEXT100);
        }
        ws.cloneWorkspace(user1, cp1, "fakename", false, "eeswaffertheen", bigmeta);
        bigmeta.put("thing", TEXT100);
        failClone(user1, cp1, "fakename", bigmeta, new IllegalArgumentException(
                "Metadata size of 16076 is > 16000 bytes"));
        */
        ws.setGlobalPermission(user1, clone2, Permission.NONE);
        ws.setGlobalPermission(user1, clone4, Permission.NONE);
    }

    @Test
    public void lockWorkspace() throws Exception {
        WorkspaceUser user = new WorkspaceUser("lockuser");
        WorkspaceUser user2 = new WorkspaceUser("lockuser2");
        WorkspaceIdentifier wsi = lockWS;
        Map<String, String> meta = new HashMap<String, String>();
        meta.put("some meta", "for u");
        long wsid = ws.createWorkspace(user, wsi.getName(), false, null,
                new WorkspaceUserMetadata(meta)).getId();
        ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject(
                new HashMap<String, String>(), SAFE_TYPE1, new WorkspaceUserMetadata(),
                new Provenance(user), false)), getIdFactory());
        ObjectIdentifier oi = new ObjectIdentifier(wsi, "auto1");
        //these should work
        WorkspaceInformation info = ws.lockWorkspace(user, wsi);
        checkWSInfo(info, user, "lock", 1, Permission.OWNER, false, "locked", meta);
        successGetObjects(user, Arrays.asList(oi));
        ws.cloneWorkspace(user, wsi, "lockclone", false, null, null);
        ws.copyObject(user, oi, new ObjectIdentifier(new WorkspaceIdentifier("lockclone"), "foo"));
        ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.WRITE);
        ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.NONE);
        ws.getPermissions(user, Arrays.asList(wsi));
        ws.getWorkspaceDescription(user, wsi);
        ws.getWorkspaceInformation(user, wsi);
        ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi)));
        //these should not work
        try {
            ws.lockWorkspace(user, new WorkspaceIdentifier("nolock"));
            fail("locked non-existent ws");
        } catch (NoSuchWorkspaceException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("No workspace with name nolock exists"));
        }
        ws.createWorkspace(user, "lock2", false, "foo", null);
        WorkspaceIdentifier wsi2 = new WorkspaceIdentifier("lock2");
        try {
            ws.lockWorkspace(null, wsi2);
            fail("locked w/o creds");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Anonymous users may not lock workspace lock2"));
        }
        try {
            ws.lockWorkspace(user2, wsi2);
            fail("locked w/o creds");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("User lockuser2 may not lock workspace lock2"));
        }
        ws.setWorkspaceDeleted(user, wsi2, true);
        try {
            ws.lockWorkspace(user, wsi2);
            fail("locked deleted ws");
        } catch (NoSuchWorkspaceException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Workspace lock2 is deleted"));
        }
        try {
            ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject(
                    new HashMap<String, String>(), SAFE_TYPE1, new WorkspaceUserMetadata(),
                    new Provenance(user), false)), getIdFactory());
            fail("saved to locked workspace");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.copyObject(user, oi, new ObjectIdentifier(wsi, "foo"));
            fail("copied to locked workspace");
        } catch (InaccessibleObjectException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Object foo cannot be accessed: The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.revertObject(user, oi);
            fail("revert to locked workspace");
        } catch (InaccessibleObjectException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Object auto1 cannot be accessed: The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.lockWorkspace(user, wsi);
            fail("locked locked workspace");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.renameObject(user, oi, "boo");
            fail("renamed locked workspace obj");
        } catch (InaccessibleObjectException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Object auto1 cannot be accessed: The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.renameWorkspace(user, wsi, "foo");
            fail("renamed locked workspace");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.setObjectsDeleted(user, Arrays.asList(oi), true);
            fail("deleted locked workspace obj");
        } catch (InaccessibleObjectException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Object auto1 cannot be accessed: The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.setObjectsHidden(user, Arrays.asList(oi), true);
            fail("hid locked workspace obj");
        } catch (InaccessibleObjectException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("Object auto1 cannot be accessed: The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.setWorkspaceDeleted(user, wsi, true);
            fail("deleted locked workspace");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.setWorkspaceDescription(user, wsi, "wugga");
            fail("set desc on locked ws");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
        try {
            ws.getWorkspaceDescription(user2, wsi);
            fail("bad access to locked workspace");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("User lockuser2 may not read workspace lock"));
        }
        failWSMeta(user2, wsi, "some meta", "val", new WorkspaceAuthorizationException(
                "The workspace with id " + wsid +
                ", name lock, is locked and may not be modified"));
        //should work
        ws.setGlobalPermission(user, wsi, Permission.READ);
        checkWSInfo(ws.getWorkspaceInformation(user, wsi), user, "lock", 1,
                Permission.OWNER, true, "published", meta);
        checkWSInfo(ws.getWorkspaceInformation(user2, wsi), user, "lock", 1,
                Permission.NONE, true, "published", meta);
        ws.getWorkspaceDescription(user2, wsi);
        //shouldn't work
        try {
            ws.setGlobalPermission(user, wsi, Permission.NONE);
            fail("bad access to locked workspace");
        } catch (WorkspaceAuthorizationException e) {
            assertThat("correct exception", e.getLocalizedMessage(),
                    is("The workspace with id " + wsid +
                            ", name lock, is locked and may not be modified"));
        }
    }

    @Test
    public void renameObject() throws Exception {
        WorkspaceUser user = new WorkspaceUser("renameObjUser");
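        // a second user and workspace are set up below to exercise rename permission failures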
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("renameObj");
        WorkspaceUser user2 = new WorkspaceUser("renameObjUser2");
        WorkspaceIdentifier wsi2 = new WorkspaceIdentifier("renameObj2");
        WorkspaceInformation info1 = ws.createWorkspace(user, wsi.getName(), false, null, null);
        long wsid1 = info1.getId();
        Date lastWSDate = info1.getModDate();
        ws.createWorkspace(user2, wsi2.getName(), false, null, null);
        ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject(
                new HashMap<String, String>(), SAFE_TYPE1, null,
                new Provenance(user), false)), getIdFactory());
        ws.saveObjects(user2, wsi2, Arrays.asList(new WorkspaceSaveObject(
                new HashMap<String, String>(), SAFE_TYPE1, null,
                new Provenance(user), false)), getIdFactory());
        lastWSDate = ws.getWorkspaceInformation(user, wsi).getModDate();
        ObjectInformation info = ws.renameObject(user, new ObjectIdentifier(wsi, "auto1"),
                "mynewname");
        assertWorkspaceDateUpdated(user, wsi, lastWSDate, "ws date updated on rename");
        checkObjInfo(info, 1L, "mynewname", SAFE_TYPE1.getTypeString(), 1, user,
                wsid1, "renameObj", "99914b932bd37a50b983c5e7c90ae93b", 2, null);
        String newname = ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi)))
                .get(0).getObjectName();
        assertThat("object renamed", newname, is("mynewname"));
        ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject(
                new ObjectIDNoWSNoVer("myoldname"), new HashMap<String, String>(),
                SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory());
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "bad%name",
                new IllegalArgumentException("Illegal character in object name bad%name: %"));
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "2",
                new IllegalArgumentException("Object names cannot be integers: 2"));
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "myoldname",
                new IllegalArgumentException(
                        "There is already an object in the workspace named myoldname"));
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "mynewname",
                new IllegalArgumentException("Object is already named mynewname"));
        failObjRename(user, new ObjectIdentifier(wsi, "bar"), "foo",
                new NoSuchObjectException(
                        "No object with name bar exists in workspace " + wsid1));
        failObjRename(user, new ObjectIdentifier(wsi2, "auto1"), "foo",
                new InaccessibleObjectException(
                        "Object auto1 cannot be accessed: User renameObjUser may not rename objects in workspace renameObj2"));
        failObjRename(null, new ObjectIdentifier(wsi2, "auto1"), "foo",
                new InaccessibleObjectException(
                        "Object auto1 cannot be accessed: Anonymous users may not rename objects in workspace renameObj2"));
        ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, "mynewname")), true);
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "foo",
                new InaccessibleObjectException(
                        "Object 1 (name mynewname) in workspace " + wsid1 + " has been deleted"));
        ws.setWorkspaceDeleted(user, wsi, true);
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "foo",
                new InaccessibleObjectException(
                        "Object mynewname cannot be accessed: Workspace renameObj is deleted"));
        ws.setWorkspaceDeleted(user, wsi, false);
        failObjRename(user, new ObjectIdentifier(new WorkspaceIdentifier("renameObjfake"),
                "mynewname"), "foo", new InaccessibleObjectException(
                        "Object mynewname cannot be accessed: No workspace with name renameObjfake exists"));
        ws.lockWorkspace(user, wsi);
        failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "foo",
                new InaccessibleObjectException(
                        "Object mynewname cannot be accessed: The workspace with id " + wsid1 +
                        ", name renameObj, is locked and may not be modified"));
    }

    @Test
    public void renameWorkspace() throws Exception {
        WorkspaceUser user = new WorkspaceUser("renameWSUser");
        WorkspaceUser user2 = new WorkspaceUser("renameWSUser2");
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("renameWS");
        WorkspaceIdentifier wsi2 = new WorkspaceIdentifier("renameWS2");
        Map<String, String> meta = new HashMap<String, String>();
        meta.put("?", "42");
        meta.put("Panic", "towel");
        WorkspaceInformation info1 = ws.createWorkspace(user, wsi.getName(), false, null,
                new WorkspaceUserMetadata(meta));
        WorkspaceIdentifier newwsi = new WorkspaceIdentifier(user.getUser() + ":newRenameWS");
        Thread.sleep(2); //make sure timestamp is different on rename
        WorkspaceInformation info2 = ws.renameWorkspace(user, wsi, newwsi.getName());
        checkWSInfo(info2, user, newwsi.getName(), 0, Permission.OWNER, false, "unlocked", meta);
        assertTrue("date updated on ws rename", info2.getModDate().after(info1.getModDate()));
        checkWSInfo(ws.getWorkspaceInformation(user, newwsi), user, newwsi.getName(), 0,
                Permission.OWNER, false, "unlocked", meta);
        failWSRename(user, newwsi, "foo|bar",
                new IllegalArgumentException("Illegal character in workspace name foo|bar: |"));
        failWSRename(user, newwsi, "renameWSUser:9",
                new IllegalArgumentException("Workspace names cannot be integers: renameWSUser:9"));
        failWSRename(user, newwsi, "9",
                new IllegalArgumentException("Workspace names cannot be integers: 9"));
        failWSRename(user, newwsi, "foo:foobar", new IllegalArgumentException(
                "Workspace name foo:foobar must only contain the user name renameWSUser prior to the : delimiter"));
        ws.createWorkspace(user2, wsi2.getName(), false, null, null);
        ws.setPermissions(user2, wsi2, Arrays.asList(user), Permission.WRITE);
        failWSRename(user, newwsi, "renameWS2",
                new IllegalArgumentException("There is already a workspace named renameWS2"));
        failWSRename(user, newwsi, newwsi.getName(),
                new IllegalArgumentException("Workspace is already named renameWSUser:newRenameWS"));
        failWSRename(user, new WorkspaceIdentifier(newwsi.getName() + "a"), newwsi.getName(),
                new NoSuchWorkspaceException("No workspace with name renameWSUser:newRenameWSa exists", wsi));
        failWSRename(user, wsi2, newwsi.getName(),
                new WorkspaceAuthorizationException("User renameWSUser may not rename workspace renameWS2"));
        failWSRename(null, newwsi, "renamefoo",
                new WorkspaceAuthorizationException("Anonymous users may not rename workspace renameWSUser:newRenameWS"));
        ws.setWorkspaceDeleted(user, newwsi, true);
        failWSRename(user, newwsi, "renamefoo",
                new NoSuchWorkspaceException("Workspace " + newwsi.getName() + " is deleted", newwsi));
        ws.setWorkspaceDeleted(user, newwsi, false);
        ws.lockWorkspace(user, newwsi);
        failWSRename(user, newwsi, "renamefoo",
                new WorkspaceAuthorizationException("The workspace with id " + info1.getId() +
                        ", name " + newwsi.getName() + ", is locked and may not be modified"));
    }

    @Test
    public void setGlobalRead() throws Exception {
        WorkspaceUser user = new WorkspaceUser("setGlobalUser");
        WorkspaceUser user2 = new WorkspaceUser("setGlobalUser2");
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("global");
        long wsid = ws.createWorkspace(user, wsi.getName(), false, null, null).getId();
        failGetWorkspaceDesc(user2, wsi, new WorkspaceAuthorizationException(
                "User setGlobalUser2 may not read workspace global"));
        ws.setGlobalPermission(user, wsi, Permission.READ);
        assertThat("read set correctly",
                ws.getPermissions(user, Arrays.asList(wsi)).get(0).get(new AllUsers('*')),
                is(Permission.READ));
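        // with global read set, an unrelated user can now read the workspace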
        ws.getWorkspaceDescription(user2, wsi);
        failSetGlobalPerm(user, null, Permission.READ, new IllegalArgumentException(
                "Workspace identifier cannot be null"));
        failSetGlobalPerm(user, wsi, Permission.WRITE, new IllegalArgumentException(
                "Global permissions cannot be greater than read"));
        failSetGlobalPerm(user2, wsi, Permission.NONE, new WorkspaceAuthorizationException(
                "User setGlobalUser2 may not set global permission on workspace global"));
        failSetGlobalPerm(null, wsi, Permission.NONE, new WorkspaceAuthorizationException(
                "Anonymous users may not set global permission on workspace global"));
        ws.setWorkspaceDeleted(user, wsi, true);
        failSetGlobalPerm(user, wsi, Permission.NONE, new NoSuchWorkspaceException(
                "Workspace global is deleted", wsi));
        ws.setWorkspaceDeleted(user, wsi, false);
        ws.setGlobalPermission(user, wsi, Permission.NONE);
        ws.lockWorkspace(user, wsi);
        failSetGlobalPerm(user, wsi, Permission.NONE, new WorkspaceAuthorizationException(
                "The workspace with id " + wsid +
                ", name global, is locked and may not be modified"));
        //this is tested in lockWorkspace
//        ws.setGlobalPermission(user, wsi, Permission.READ);
//        assertThat("read set correctly on locked ws",
//                ws.getPermissions(user, wsi).get(new AllUsers('*')),
//                is(Permission.READ));
    }

    @Test
    public void hiddenObjects() throws Exception {
        WorkspaceUser user = new WorkspaceUser("hideObjUser");
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("hideObj");
        WorkspaceUser user2 = new WorkspaceUser("hideObjUser2");
        long wsid1 = ws.createWorkspace(user, wsi.getName(), false, null, null).getId();
        ObjectInformation auto1 = ws.saveObjects(user, wsi, Arrays.asList(
                new WorkspaceSaveObject(new HashMap<String, String>(), SAFE_TYPE1, null,
                        new Provenance(user), false)), getIdFactory()).get(0);
        ObjectInformation auto2 = ws.saveObjects(user, wsi, Arrays.asList(
                new WorkspaceSaveObject(new HashMap<String, String>(), SAFE_TYPE1, null,
                        new Provenance(user), true)), getIdFactory()).get(0);
        ObjectInformation obj1 = ws.saveObjects(user, wsi, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("obj1"),
                        new HashMap<String, String>(), SAFE_TYPE1, null,
                        new Provenance(user), true)), getIdFactory()).get(0);
        List<ObjectInformation> expected = new ArrayList<ObjectInformation>();
        expected.add(auto1);
        ListObjectsParameters lop = new ListObjectsParameters(user, Arrays.asList(wsi))
                .withIncludeMetaData(true);
        compareObjectInfo(ws.listObjects(lop), expected);
        expected.add(auto2);
        expected.add(obj1);
        compareObjectInfo(ws.listObjects(lop.withShowHidden(true)), expected);
        ws.setObjectsHidden(user, Arrays.asList(new ObjectIdentifier(wsi, 3),
                new ObjectIdentifier(wsi, "auto2")), false);
        compareObjectInfo(ws.listObjects(lop.withShowHidden(false)), expected);
        ws.setObjectsHidden(user, Arrays.asList(new ObjectIdentifier(wsi, 1),
                new ObjectIdentifier(wsi, "obj1")), true);
        expected.remove(auto1);
        expected.remove(obj1);
        compareObjectInfo(ws.listObjects(lop), expected);
        failSetHide(user, new ObjectIdentifier(wsi, "fake"), true, new NoSuchObjectException(
                "No object with name fake exists in workspace " + wsid1));
        failSetHide(user, new ObjectIdentifier(new WorkspaceIdentifier("fake"), "fake"), true,
                new InaccessibleObjectException(
                        "Object fake cannot be accessed: No workspace with name fake exists"));
        failSetHide(user2, new ObjectIdentifier(wsi, "auto1"), true,
                new InaccessibleObjectException(
                        "Object auto1 cannot be accessed: User hideObjUser2 may not hide objects from workspace hideObj"));
        failSetHide(null, new ObjectIdentifier(wsi, "auto1"), true,
                new InaccessibleObjectException(
                        "Object auto1 cannot be accessed: Anonymous users may not hide objects from workspace hideObj"));
        ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, 3)), true);
        failSetHide(user, new ObjectIdentifier(wsi, 3), true, new NoSuchObjectException(
                "Object 3 (name obj1) in workspace " + wsid1 + " has been deleted"));
        ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, 3)), false);
        ws.setWorkspaceDeleted(user, wsi, true);
        failSetHide(user, new ObjectIdentifier(new WorkspaceIdentifier("fake"), "fake"), true,
                new InaccessibleObjectException(
                        "Object fake cannot be accessed: No workspace with name fake exists"));
        ws.setWorkspaceDeleted(user, wsi, false);
        ws.lockWorkspace(user, wsi);
        failSetHide(user, new ObjectIdentifier(wsi, 3), true, new InaccessibleObjectException(
                "Object 3 cannot be accessed: The workspace with id " + wsid1 +
                ", name hideObj, is locked and may not be modified"));
    }

    @Test
    public void listWorkspaces() throws Exception {
        WorkspaceUser user = new WorkspaceUser("listUser");
        WorkspaceUser user2 = new WorkspaceUser("listUser2");
        WorkspaceUser user3 = new WorkspaceUser("listUser3");
        Map<String, String> premeta1 = new HashMap<String, String>();
        premeta1.put("this is", "some meta meta");
        premeta1.put("bro", "heim");
        WorkspaceUserMetadata meta1 = new WorkspaceUserMetadata(premeta1);
        Map<String, String> premeta2 = new HashMap<String, String>();
        premeta2.put("suckmaster", "burstingfoam");
        WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(premeta2);
        WorkspaceInformation stdws = ws.createWorkspace(user, "stdws", false, null, meta1);
        WorkspaceInformation globalws = ws.createWorkspace(user, "globalws", true, null, meta2);
        WorkspaceInformation deletedws = ws.createWorkspace(user, "deletedws", false, null, null);
        ws.setWorkspaceDeleted(user, new WorkspaceIdentifier("deletedws"), true);
        ws.createWorkspace(user2, "readable", false, null, meta1);
        ws.setPermissions(user2, new WorkspaceIdentifier("readable"),
                Arrays.asList(user), Permission.READ);
        WorkspaceInformation readable = ws.getWorkspaceInformation(user,
                new WorkspaceIdentifier("readable"));
        ws.createWorkspace(user2, "writeable", false, null, meta2);
        ws.setPermissions(user2, new WorkspaceIdentifier("writeable"),
                Arrays.asList(user), Permission.WRITE);
        WorkspaceInformation writeable = ws.getWorkspaceInformation(user,
                new WorkspaceIdentifier("writeable"));
        ws.createWorkspace(user2, "adminable", false, null, null);
        ws.setPermissions(user2, new WorkspaceIdentifier("adminable"),
                Arrays.asList(user), Permission.ADMIN);
        WorkspaceInformation adminable = ws.getWorkspaceInformation(user,
                new WorkspaceIdentifier("adminable"));
        @SuppressWarnings("unused")
        WorkspaceInformation delreadable = ws.createWorkspace(user2, "delreadable", false, null, meta1);
        ws.setPermissions(user2, new WorkspaceIdentifier("delreadable"),
                Arrays.asList(user), Permission.READ);
        ws.setWorkspaceDeleted(user2, new WorkspaceIdentifier("delreadable"), true);
        ws.createWorkspace(user2, "globalreadable", true, null, meta2);
        WorkspaceInformation globalreadable = ws.getWorkspaceInformation(user,
                new WorkspaceIdentifier("globalreadable"));
        @SuppressWarnings("unused")
        WorkspaceInformation deletedglobalreadable = ws.createWorkspace(user2,
                "deletedglobalreadable", true, null, null);
        ws.setWorkspaceDeleted(user2, new WorkspaceIdentifier("deletedglobalreadable"), true);
        @SuppressWarnings("unused")
        WorkspaceInformation unreadable = ws.createWorkspace(user2, "unreadable", false, null, meta1);
        ws.createWorkspace(user3, "listuser3ws", false, null, null);
        ws.setPermissions(user3, new WorkspaceIdentifier("listuser3ws"),
                Arrays.asList(user), Permission.READ);
        WorkspaceInformation listuser3 = ws.getWorkspaceInformation(user,
                new WorkspaceIdentifier("listuser3ws"));
        ws.createWorkspace(user3, "listuser3glws", true, null, meta2);
        WorkspaceInformation listuser3gl = ws.getWorkspaceInformation(user,
                new WorkspaceIdentifier("listuser3glws"));
        Map<WorkspaceInformation, Boolean> expected = new HashMap<WorkspaceInformation, Boolean>();
        expected.put(stdws, false);
        expected.put(globalws, false);
        expected.put(readable, false);
        expected.put(writeable, false);
        expected.put(adminable, false);
        expected.put(listuser3, false);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                true, false, false), expected);
        checkWSInfoList(ws.listWorkspaces(user, null, null,
                new WorkspaceUserMetadata(MT_META), null, null, true, false, false), expected);
        expected.put(globalreadable, false);
        expected.put(listuser3gl, false);
        WorkspaceInformation locked = null;
        try {
            locked = ws.getWorkspaceInformation(user, lockWS);
        } catch (NoSuchWorkspaceException nswe) {
            //ignore - means that the locking ws test has not been run yet
        }
        if (locked != null) {
            expected.put(locked, false);
        }
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                false, false, false), expected);
        expected.put(deletedws, true);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                false, true, false), expected);
        expected.remove(globalreadable);
        expected.remove(locked);
        expected.remove(listuser3gl);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                true, true, false), expected);
        checkWSInfoList(ws.listWorkspaces(user, Permission.NONE, null, null, null, null,
                true, true, false), expected);
        checkWSInfoList(ws.listWorkspaces(user, Permission.READ, null, null, null, null,
                true, true, false), expected);
        expected.remove(readable);
        expected.remove(listuser3);
        checkWSInfoList(ws.listWorkspaces(user, Permission.WRITE, null, null, null, null,
                true, true, false), expected);
        expected.remove(writeable);
        checkWSInfoList(ws.listWorkspaces(user, Permission.ADMIN, null, null, null, null,
                true, true, false), expected);
        expected.clear();
        expected.put(globalreadable, false);
        expected.put(listuser3gl, false);
        if (locked != null) {
            expected.put(locked, false);
        }
        WorkspaceUser newb = new WorkspaceUser("listUserAZillion");
        expected.put(ws.getWorkspaceInformation(newb, new WorkspaceIdentifier("globalws")), false);
        checkWSInfoList(ws.listWorkspaces(newb, null, null, null, null, null,
                false, false, false), expected);
        expected.clear();
        checkWSInfoList(ws.listWorkspaces(newb, null, null, null, null, null,
                false, false, true), expected);
        checkWSInfoList(ws.listWorkspaces(newb, null, null, null, null, null,
                true, false, false), expected);
        expected.put(deletedws, true);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                false, false, true), expected);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                false, true, true), expected);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                true, true, true), expected);
        checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null,
                false, false, true), expected);
        expected.clear();
        expected.put(stdws, false);
        expected.put(globalws, false);
        checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user), null, null, null,
                false, false, false), expected);
        expected.put(readable, false);
        expected.put(writeable, false);
        expected.put(adminable, false);
        expected.put(globalreadable, false);
        checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user, user2),
                null, null, null, false, false, false), expected);
        expected.put(listuser3, false);
        expected.put(listuser3gl, false);
        checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user, user2, user3),
                null, null, null, false, false, false), expected);
        expected.remove(globalreadable);
        expected.remove(listuser3gl);
        checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user, user2, user3),
                null, null, null, true, false, false), expected);
        expected.remove(stdws);
        expected.remove(globalws);
        checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user2, user3),
                null, null, null, true, false, false), expected);
        expected.remove(readable);
        expected.remove(writeable);
        expected.remove(adminable);
        checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user3),
                null, null, null, true, false, false), expected);
        Map<String, String> querymeta = new HashMap<String, String>();
        querymeta.put("suckmaster", "burstingfoam");
        expected.clear();
        expected.put(globalws, false);
        expected.put(writeable, false);
        expected.put(globalreadable, false);
        expected.put(listuser3gl, false);
        checkWSInfoList(ws.listWorkspaces(user, null, null,
                new WorkspaceUserMetadata(querymeta), null, null, false, false, false), expected);
        querymeta.clear();
        querymeta.put("this is", "some meta meta");
        expected.clear();
        expected.put(stdws, false);
        expected.put(readable, false);
        checkWSInfoList(ws.listWorkspaces(user, null, null,
                new WorkspaceUserMetadata(querymeta), null, null, false, false, false), expected);
        querymeta.clear();
        querymeta.put("bro", "heim");
        checkWSInfoList(ws.listWorkspaces(user, null, null,
                new WorkspaceUserMetadata(querymeta), null, null, false, false, false), expected);
        try {
            ws.listWorkspaces(user, null, null, meta1, null, null, false, false, false);
            fail("listed ws with bad meta");
        } catch (IllegalArgumentException exp) {
            assertThat("correct exception", exp.getLocalizedMessage(),
                    is("Only one metadata spec allowed"));
        }
        ws.setGlobalPermission(user2, new WorkspaceIdentifier("globalreadable"), Permission.NONE);
        ws.setWorkspaceDeleted(user2, new WorkspaceIdentifier("deletedglobalreadable"), false);
        ws.setGlobalPermission(user2, new WorkspaceIdentifier("deletedglobalreadable"), Permission.NONE);
        ws.setGlobalPermission(user, new WorkspaceIdentifier("globalws"), Permission.NONE);
        ws.setGlobalPermission(user3, new WorkspaceIdentifier("listuser3glws"), Permission.NONE);
    }

    @Test
    public void listWorkspacesByDate() throws Exception {
        WorkspaceUser u = new WorkspaceUser("listwsbydate");
        WorkspaceInformation i1 = ws.createWorkspace(u, "listwsbydate1", false, null, null);
        Thread.sleep(100);
        WorkspaceInformation i2 = ws.createWorkspace(u, "listwsbydate2", false, null, null);
        Thread.sleep(100);
        WorkspaceInformation i3 = ws.createWorkspace(u, "listwsbydate3", false, null, null);
        Thread.sleep(100);
        WorkspaceInformation i4 = ws.createWorkspace(u, "listwsbydate4", false, null, null);
        Thread.sleep(100);
        WorkspaceInformation i5 = ws.createWorkspace(u, "listwsbydate5", false, null, null);
        Date beforeall = new Date(i1.getModDate().getTime() - 1);
        Date afterall = new Date(i5.getModDate().getTime() + 1);
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, null, null,
                true, false, false), Arrays.asList(i1, i2, i3, i4, i5));
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, beforeall, afterall,
                true, false, false), Arrays.asList(i1, i2, i3, i4, i5));
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, afterall, beforeall,
                true, false, false), new ArrayList<WorkspaceInformation>());
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, i3.getModDate(), i4.getModDate(),
                true, false, false), new ArrayList<WorkspaceInformation>());
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, i2.getModDate(), i4.getModDate(),
                true, false, false), Arrays.asList(i3));
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, i2.getModDate(), null,
                true, false, false), Arrays.asList(i3, i4, i5));
        checkWSInfoList(ws.listWorkspaces(u, null, null, null, null, i4.getModDate(),
                true, false, false), Arrays.asList(i1, i2, i3));
        checkWSInfoList(ws.listWorkspaces(u, null, null, null,
                new Date(i2.getModDate().getTime() - 1), i5.getModDate(),
                true, false, false), Arrays.asList(i2, i3, i4));
    }

    @Test
    public void listObjectsWithDeletedObjects() throws Exception {
        /* Test that deleted objects only show up in the objects list when
         * requested *and* when the user has permission to write to the
         * workspace, which is required for listing deleted objects.
         */
        WorkspaceUser u1 = new WorkspaceUser("listObjDelUser1");
        WorkspaceUser u2 = new WorkspaceUser("listObjDelUser2");
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObjDel");
        ws.createWorkspace(u1, wsi.getName(), false, null, null);
        ws.setPermissions(u1, wsi, Arrays.asList(u2), Permission.READ);
        ObjectInformation std = ws.saveObjects(u1, wsi, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("std"),
                        new HashMap<String, String>(), SAFE_TYPE1, null,
                        new Provenance(u1), false)), getIdFactory()).get(0);
        ObjectInformation del = ws.saveObjects(u1, wsi, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("del"),
                        new HashMap<String, String>(), SAFE_TYPE1, null,
                        new Provenance(u1), false)), getIdFactory()).get(0);
        ws.setObjectsDeleted(u1, Arrays.asList(new ObjectIdentifier(wsi, "del")), true);
        ListObjectsParameters lop = new ListObjectsParameters(u1, Arrays.asList(wsi))
                .withIncludeMetaData(true);
        //test user1 - owner. Should always see deleted if requested.
        compareObjectInfo(ws.listObjects(lop), Arrays.asList(std));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std, del));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)),
                Arrays.asList(del));
        lop = new ListObjectsParameters(u2, Arrays.asList(wsi))
                .withIncludeMetaData(true);
        //test user2 with only read perms. Should never see deleted objects.
        compareObjectInfo(ws.listObjects(lop), Arrays.asList(std));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)),
                new LinkedList<ObjectInformation>());
        //test user2 with write perms. Should always see deleted if requested.
        ws.setPermissions(u1, wsi, Arrays.asList(u2), Permission.WRITE);
        compareObjectInfo(ws.listObjects(lop.withShowOnlyDeleted(false)), Arrays.asList(std));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std, del));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)),
                Arrays.asList(del));
    }

    @Test
    public void listObjectsWithDeletedWorkspace() throws Exception {
        /* Test that objects from a deleted workspace don't show up in
         * listObjects output.
         */
        WorkspaceUser u1 = new WorkspaceUser("listObjDelWSUser");
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObjDelWS");
        WorkspaceIdentifier wsdel = new WorkspaceIdentifier("listObjDelWS_Deleted");
        ws.createWorkspace(u1, wsi.getName(), false, null, null);
        ws.createWorkspace(u1, wsdel.getName(), false, null, null);
        Map<String, String> pmeta = new HashMap<String, String>();
        pmeta.put("test", "listObjDelWS");
        WorkspaceUserMetadata meta = new WorkspaceUserMetadata(pmeta);
        ObjectInformation std = ws.saveObjects(u1, wsi, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("std"),
                        new HashMap<String, String>(), SAFE_TYPE1, meta,
                        new Provenance(u1), false)), getIdFactory()).get(0);
        ws.saveObjects(u1, wsdel, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("del"),
                        new HashMap<String, String>(), SAFE_TYPE1, meta,
                        new Provenance(u1), false)), getIdFactory()).get(0);
        ws.setWorkspaceDeleted(u1, wsdel, true);
        ListObjectsParameters lop = new ListObjectsParameters(u1, SAFE_TYPE1)
                .withMetadata(meta).withIncludeMetaData(true);
        compareObjectInfo(ws.listObjects(lop), Arrays.asList(std));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std));
        compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)),
                new LinkedList<ObjectInformation>());
    }

    @Test
    public void listObjectsAndHistory() throws Exception {
        WorkspaceUser user = new WorkspaceUser("listObjUser");
        WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObj1");
        WorkspaceIdentifier readable = new WorkspaceIdentifier("listObjread");
        WorkspaceIdentifier writeable = new WorkspaceIdentifier("listObjwrite");
        WorkspaceIdentifier adminable = new WorkspaceIdentifier("listObjadmin");
        WorkspaceIdentifier thirdparty = new WorkspaceIdentifier("thirdparty");
        WorkspaceUser user2 = new WorkspaceUser("listObjUser2");
        long wsid1 = ws.createWorkspace(user, wsi.getName(), false, null, null).getId();
        ws.createWorkspace(user2, readable.getName(), false, null, null).getId();
        ws.setPermissions(user2, readable, Arrays.asList(user), Permission.READ);
        long wsidwrite = ws.createWorkspace(user2, writeable.getName(), false, null, null).getId();
        ws.setPermissions(user2, writeable, Arrays.asList(user), Permission.WRITE);
        ws.createWorkspace(user2, adminable.getName(), false, null, null).getId();
        ws.setPermissions(user2, adminable, Arrays.asList(user), Permission.ADMIN);
        WorkspaceUser user3 = new WorkspaceUser("listObjUser3");
        ws.createWorkspace(user3, thirdparty.getName(), true, null, null).getId();
        Map<String, String> pmeta = new HashMap<String, String>();
        pmeta.put("meta1", "1");
        Map<String, String> pmeta2 = new HashMap<String, String>();
        pmeta2.put("meta2", "2");
        Map<String, String> pmeta3 = new HashMap<String, String>();
        pmeta3.put("meta3", "3");
        Map<String, String> pmeta32 = new HashMap<String, String>();
        pmeta32.put("meta3", "3");
        pmeta32.put("meta2", "2");
        WorkspaceUserMetadata meta = new WorkspaceUserMetadata(pmeta);
        WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(pmeta2);
        WorkspaceUserMetadata meta3 = new WorkspaceUserMetadata(pmeta3);
        WorkspaceUserMetadata meta32 = new WorkspaceUserMetadata(pmeta32);
        Map<String, Object> passTCdata = new HashMap<String, Object>();
        passTCdata.put("thing", "athing");
        ObjectInformation std = ws.saveObjects(user, wsi, Arrays.asList(
                new WorkspaceSaveObject(new ObjectIDNoWSNoVer("std"),
                        new HashMap<String, String>(), SAFE_TYPE1, null,
                        new Provenance(user), false)), getIdFactory()).get(0);
        ObjectInformation stdnometa = ws.getObjectInformation(user, 
Arrays.asList(new ObjectIdentifier(wsi, "std")), false, false).get(0); ObjectInformation objstack1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("objstack"), new HashMap<String, String>(), SAFE_TYPE1_10, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation objstack1nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "objstack", 1)), false, false).get(0); ObjectInformation objstack2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("objstack"), passTCdata, SAFE_TYPE1_20, meta2, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation objstack2nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "objstack", 2)), false, false).get(0); ObjectInformation type2_1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), new HashMap<String, String>(), SAFE_TYPE2, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_1nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 1)), false, false).get(0); ObjectInformation type2_2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), new HashMap<String, String>(), SAFE_TYPE2_10, meta2, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_2nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 2)), false, false).get(0); ObjectInformation type2_3 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), passTCdata, SAFE_TYPE2_20, meta32, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_3nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 3)), false, false).get(0); ObjectInformation type2_4 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), passTCdata, SAFE_TYPE2_21, meta3, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_4nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 4)), false, false).get(0); ObjectInformation stdws2 = ws.saveObjects(user2, writeable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("stdws2"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation stdws2nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(writeable, "stdws2")), false, false).get(0); ObjectInformation hidden = ws.saveObjects(user, writeable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("hidden"), new HashMap<String, String>(), SAFE_TYPE1, meta2, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation hiddennometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(writeable, "hidden")), false, false).get(0); ws.setObjectsHidden(user, Arrays.asList(new ObjectIdentifier(writeable, "hidden")), true); ObjectInformation deleted = ws.saveObjects(user2, writeable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("deleted"), new HashMap<String, String>(), SAFE_TYPE1, meta32, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation deletednometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(writeable, "deleted")), false, false).get(0); ws.setObjectsDeleted(user, Arrays.asList(new 
ObjectIdentifier(writeable, "deleted")), true); ObjectInformation readobj = ws.saveObjects(user2, readable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("readobj"), new HashMap<String, String>(), SAFE_TYPE1, meta3, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation readobjnometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(readable, "readobj")), false, false).get(0); ObjectInformation adminobj = ws.saveObjects(user2, adminable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("adminobj"), new HashMap<String, String>(), SAFE_TYPE1, meta3, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation adminobjnometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(adminable, "adminobj")), false, false).get(0); ObjectInformation thirdobj = ws.saveObjects(user3, thirdparty, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("thirdobj"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation thirdobjnometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(thirdparty, "thirdobj")), false, false).get(0); //this should be invisible to anyone except user3 ws.saveObjects(user3, thirdparty, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("thirdobjdel"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()).get(0); ws.setObjectsDeleted(user3, Arrays.asList(new ObjectIdentifier(thirdparty, "thirdobjdel")), true); ObjectInformation lock = null; ObjectInformation locknometa = null; try { ListObjectsParameters lop = new ListObjectsParameters(user, Arrays.asList(lockWS)) .withIncludeMetaData(true); List<ObjectInformation> foo = ws.listObjects(lop); if (foo.size() > 1) { fail("found more than one object in the locked workspace, this is unexpected"); } if (foo.size() == 1) { lock = foo.get(0); locknometa = ws.listObjects(lop.withIncludeMetaData(false)).get(0); } } catch (NoSuchWorkspaceException nswe) { //do nothing, lock workspace wasn't created yet } TypeDefId allType1 = new TypeDefId(SAFE_TYPE1.getType().getTypeString()); TypeDefId allType2 = new TypeDefId(SAFE_TYPE2.getType().getTypeString()); //test with anon user ListObjectsParameters lop = new ListObjectsParameters(null, SAFE_TYPE1) .withShowDeleted(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), setUpListObjectsExpected(Arrays.asList(thirdobj), lock)); compareObjectInfo(ws.listObjects(lop.withExcludeGlobal(true)), new LinkedList<ObjectInformation>()); //test basics lop = new ListObjectsParameters(user, Arrays.asList(wsi, writeable)) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withShowOnlyDeleted(true)), Arrays.asList(deleted)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi)) .withShowHidden(true).withShowDeleted(true).withShowOnlyDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), new ArrayList<ObjectInformation>()); compareObjectInfo(ws.listObjects(lop.withShowHidden(false).withShowOnlyDeleted(false)), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, deleted)); compareObjectInfo(ws.listObjects(lop.withShowHidden(true).withShowDeleted(false)), Arrays.asList(std, 
objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden)); compareObjectInfo(ws.listObjects(lop.withShowHidden(false)), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2)); compareObjectInfo(ws.listObjects(lop.withShowHidden(true).withShowDeleted(true) .withShowAllVersions(false)), Arrays.asList(std, objstack2, type2_4, stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withShowHidden(false).withShowDeleted(false) .withIncludeMetaData(false)), Arrays.asList(stdnometa, objstack2nometa, type2_4nometa, stdws2nometa)); compareObjectInfo(ws.listObjects(lop.withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true)), Arrays.asList(stdnometa, objstack1nometa, objstack2nometa, type2_1nometa, type2_2nometa, type2_3nometa, type2_4nometa, stdws2nometa, hiddennometa, deletednometa)); lop = new ListObjectsParameters(user, allType1) .withShowHidden(true).withShowDeleted(true).withShowAllVersions(true) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), setUpListObjectsExpected(Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); compareObjectInfo(ws.listObjects(lop.withSavers(new ArrayList<WorkspaceUser>())), setUpListObjectsExpected(Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); //exclude globally readable workspaces compareObjectInfo(ws.listObjects(lop.withExcludeGlobal(true)), Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj)); //if the globally readable workspace is explicitly listed, should ignore excludeGlobal lop = new ListObjectsParameters(user, Arrays.asList(wsi, writeable, thirdparty)) .withShowHidden(true).withShowDeleted(true).withShowAllVersions(true) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted, thirdobj)); compareObjectInfo(ws.listObjects(lop.withExcludeGlobal(true)), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted, thirdobj)); //test user filtering lop = new ListObjectsParameters(user, allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user, user2, user3))), Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj, thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user2, user3))), Arrays.asList(stdws2, deleted, readobj, adminobj, thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user, user3))), Arrays.asList(std, hidden, objstack1, objstack2, thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user3))), Arrays.asList(thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user))), Arrays.asList(std, hidden, objstack1, objstack2)); //meta filtering lop = new ListObjectsParameters(user, Arrays.asList(wsi, writeable)) .withMetadata(new WorkspaceUserMetadata()) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta)), Arrays.asList(objstack1, type2_1, stdws2)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta2)), 
Arrays.asList(objstack2, type2_2, type2_3, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta3)), Arrays.asList(type2_3, type2_4, deleted)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta).withShowAllVersions(false)), Arrays.asList(stdws2)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta2)), Arrays.asList(objstack2, hidden, deleted)); //type filtering compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi), allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(std, objstack1, objstack2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(writeable), allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, allType2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_1, type2_2, type2_3, type2_4)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(writeable), allType2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), new ArrayList<ObjectInformation>()); //permission filtering lop = new ListObjectsParameters(user, SAFE_TYPE1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), setUpListObjectsExpected(Arrays.asList(std, stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); compareObjectInfo(ws.listObjects(lop.withIncludeMetaData(false)), setUpListObjectsExpected(Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, readobjnometa, adminobjnometa, thirdobjnometa), locknometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.NONE)), setUpListObjectsExpected(Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, readobjnometa, adminobjnometa, thirdobjnometa), locknometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.READ)), setUpListObjectsExpected(Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, readobjnometa, adminobjnometa, thirdobjnometa), locknometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.WRITE)), Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, adminobjnometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.ADMIN)), Arrays.asList(stdnometa, adminobjnometa)); //more type filtering compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE1_10) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(objstack1)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE1_20) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(objstack2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_1)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE2_10) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE2_20) 
.withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_3));
compareObjectInfo(ws.listObjects(new ListObjectsParameters( user, new TypeDefId(SAFE_TYPE2_20.getType(), SAFE_TYPE2_20.getMajorVersion())) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_3, type2_4));
compareObjectInfo(ws.listObjects(new ListObjectsParameters( user, new TypeDefId(SAFE_TYPE2_10.getType(), SAFE_TYPE2_10.getMajorVersion())) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_2));
compareObjectInfo(ws.listObjects(new ListObjectsParameters(user2, allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), setUpListObjectsExpected(Arrays.asList(stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock));
compareObjectInfo(ws.listObjects(new ListObjectsParameters(user2, Arrays.asList(writeable)) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(stdws2, hidden, deleted));
compareObjectInfo(ws.listObjects(new ListObjectsParameters(user2, allType2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), new ArrayList<ObjectInformation>());
//TODO move these to unit tests for LOP
// can't test the 2-argument constructor with a null 2nd argument,
// since the constructor call would then be ambiguous
try { new ListObjectsParameters(user, new LinkedList<WorkspaceIdentifier>()); fail("Created list objs param with bad init"); } catch (IllegalArgumentException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Must provide at least one workspace")); }
try { new ListObjectsParameters(user, null, SAFE_TYPE1); fail("Created list objs param with bad init"); } catch (IllegalArgumentException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Must provide at least one workspace")); }
try { new ListObjectsParameters(user, new LinkedList<WorkspaceIdentifier>(), SAFE_TYPE1); fail("Created list objs param with bad init"); } catch (IllegalArgumentException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Must provide at least one workspace")); }
try { new ListObjectsParameters(user, Arrays.asList(wsi), null); fail("Created list objs param with bad init"); } catch (NullPointerException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Type cannot be null")); }
failListObjects(user2, Arrays.asList(wsi, writeable), null, new WorkspaceAuthorizationException("User listObjUser2 may not read workspace listObj1"));
failListObjects(null, Arrays.asList(wsi, writeable), null, new WorkspaceAuthorizationException("Anonymous users may not read workspace listObj1"));
failListObjects(user, Arrays.asList(writeable, new WorkspaceIdentifier("listfake")), null, new NoSuchWorkspaceException("No workspace with name listfake exists", wsi));
failListObjects(user, Arrays.asList(wsi, writeable), meta32.getMetadata(), new IllegalArgumentException("Only one metadata spec allowed"));
ws.createWorkspace(user, "listdel", false, null, null);
ws.setWorkspaceDeleted(user, new WorkspaceIdentifier("listdel"), true);
failListObjects(user, Arrays.asList(writeable, new WorkspaceIdentifier("listdel")), null, new NoSuchWorkspaceException("Workspace listdel is deleted", wsi));
assertThat("correct object history for std", ws.getObjectHistory(user, new
ObjectIdentifier(wsi, "std")), is(Arrays.asList(std))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, "type2")), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, 3)), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, "type2", 3)), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, 3, 4)), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for objstack", ws.getObjectHistory(user, new ObjectIdentifier(wsi, "objstack")), is(Arrays.asList(objstack1, objstack2))); assertThat("correct object history for stdws2", ws.getObjectHistory(user2, new ObjectIdentifier(writeable, "stdws2")), is(Arrays.asList(stdws2))); failGetObjectHistory(user, new ObjectIdentifier(wsi, "booger"), new NoSuchObjectException("No object with name booger exists in workspace " + wsid1)); failGetObjectHistory(user, new ObjectIdentifier(new WorkspaceIdentifier("listObjectsfake"), "booger"), new InaccessibleObjectException("Object booger cannot be accessed: No workspace with name listObjectsfake exists")); failGetObjectHistory(user, new ObjectIdentifier(new WorkspaceIdentifier("listdel"), "booger"), new InaccessibleObjectException("Object booger cannot be accessed: Workspace listdel is deleted")); failGetObjectHistory(user2, new ObjectIdentifier(wsi, 3), new InaccessibleObjectException("Object 3 cannot be accessed: User listObjUser2 may not read workspace listObj1")); failGetObjectHistory(null, new ObjectIdentifier(wsi, 3), new InaccessibleObjectException("Object 3 cannot be accessed: Anonymous users may not read workspace listObj1")); failGetObjectHistory(user2, new ObjectIdentifier(writeable, "deleted"), new InaccessibleObjectException("Object 3 (name deleted) in workspace " + wsidwrite + " has been deleted")); ws.setGlobalPermission(user3, new WorkspaceIdentifier("thirdparty"), Permission.NONE); } @Test public void listObjectsByDate() throws Exception { WorkspaceUser u = new WorkspaceUser("listObjsByDate"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObjsByDateWS"); ws.createWorkspace(u, wsi.getName(), false, null, null); Map<String, String> data = new HashMap<String, String>(); Provenance p = new Provenance(u); ObjectInformation o1 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o1", p); Thread.sleep(100); ObjectInformation o2 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o2", p); Thread.sleep(100); ObjectInformation o3 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o3", p); Thread.sleep(100); ObjectInformation o4 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o4", p); Thread.sleep(100); ObjectInformation o5 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o5", p); Date beforeall = new Date(o1.getSavedDate().getTime() - 1); Date afterall = new Date(o5.getSavedDate().getTime() + 1); ListObjectsParameters lop = new ListObjectsParameters(u, Arrays.asList(wsi)) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(o1, o2, o3, o4, o5)); compareObjectInfo(ws.listObjects(lop.withAfter(beforeall).withBefore(afterall)), Arrays.asList(o1, o2, o3, o4, o5)); compareObjectInfo(ws.listObjects(lop.withAfter(afterall).withBefore(beforeall)), new ArrayList<ObjectInformation>()); 
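// the date bounds are exclusive: a window whose endpoints are two adjacent save dates matches nothing, and only objects saved strictly inside the window are returned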
compareObjectInfo(ws.listObjects(lop.withAfter(o3.getSavedDate()).withBefore(o4.getSavedDate())), new ArrayList<ObjectInformation>()); compareObjectInfo(ws.listObjects(lop.withAfter(o2.getSavedDate()).withBefore(null)), Arrays.asList(o3, o4, o5)); compareObjectInfo(ws.listObjects(lop.withAfter(null).withBefore(o4.getSavedDate())), Arrays.asList(o1, o2, o3)); compareObjectInfo(ws.listObjects(lop.withAfter(o2.getSavedDate()).withBefore(o4.getSavedDate())), Arrays.asList(o3)); compareObjectInfo(ws.listObjects(lop.withAfter(new Date(o2.getSavedDate().getTime() -1)) .withBefore(o5.getSavedDate())), Arrays.asList(o2, o3, o4)); } @Test public void getObjectSubdata() throws Exception { /* note most tests are performed at the same time as getObjects, so * only issues specific to subsets are tested here */ WorkspaceUser user = new WorkspaceUser("subUser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("subData"); WorkspaceUser user2 = new WorkspaceUser("subUser2"); long wsid1 = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); Map<String, String> pmeta = new HashMap<String, String>(); pmeta.put("metastuff", "meta"); Map<String, String> pmeta2 = new HashMap<String, String>(); pmeta2.put("meta2", "my hovercraft is full of eels"); WorkspaceUserMetadata meta = new WorkspaceUserMetadata(pmeta); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(pmeta2); Provenance p1 = new Provenance(user); p1.addAction(new ProvenanceAction().withDescription("provenance 1") .withWorkspaceObjects(Arrays.asList("subData/auto1"))); Provenance p2 = new Provenance(user); p2.addAction(new ProvenanceAction().withDescription("provenance 2") .withWorkspaceObjects(Arrays.asList("subData/auto2"))); Map<String, Object> data1 = createData( "{\"map\": {\"id1\": {\"id\": 1," + " \"thing\": \"foo\"}," + " \"id2\": {\"id\": 2," + " \"thing\": \"foo2\"}," + " \"id3\": {\"id\": 3," + " \"thing\": \"foo3\"}" + " }," + " \"refs\": [\"subData/auto1\"]" + "}" ); Map<String, Object> data2 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " {\"id\": 2," + " \"thing\": \"foo2\"}," + " {\"id\": 3," + " \"thing\": \"foo3\"}" + " ]," + " \"refs\": [\"subData/auto2\"]" + "}" ); Map<String, Object> data3 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " {\"id\": 2," + " \"thing\": \"foo2\"}," + " null," + " {\"id\": 4," + " \"thing\": \"foo4\"}" + " ]," + " \"refs\": [\"subData/auto2\"]" + "}" ); ws.saveObjects(user, wsi, Arrays.asList( new WorkspaceSaveObject(data1, SAFE_TYPE1, meta, new Provenance(user), false), new WorkspaceSaveObject(data1, SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()); ObjectInformation o1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("o1"), data1, reftype, meta, p1, false)), getIdFactory()).get(0); ObjectInformation o2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("o2"), data2, reftype, meta2, p2, false)), getIdFactory()).get(0); ObjectInformation o3 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("o3"), data3, reftype, meta, p2, false)), getIdFactory()).get(0); ObjectIdentifier oident1 = new ObjectIdentifier(wsi, "o1"); ObjectIdentifier oident2 = new ObjectIdentifier(wsi, 4); ObjectIdentifier oident3 = ObjectIdentifier.parseObjectReference("subData/o3"); List<String> refs1 = Arrays.asList(wsid1 + "/1/1"); Map<String, String> refmap1 = new 
HashMap<String, String>(); refmap1.put("subData/auto1", wsid1 + "/1/1"); List<String> refs2 = Arrays.asList(wsid1 + "/2/1"); Map<String, String> refmap2 = new HashMap<String, String>(); refmap2.put("subData/auto2", wsid1 + "/2/1"); List<WorkspaceObjectData> got = ws.getObjectsSubSet(user, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/id3", "/map/id1"))), new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/id2"))), new SubObjectIdentifier(oident2, new ObjectPaths( Arrays.asList("/array/2", "/array/0"))), new SubObjectIdentifier(oident3, new ObjectPaths( Arrays.asList("/array/2", "/array/0", "/array/3"))))); Map<String, Object> expdata1 = createData( "{\"map\": {\"id1\": {\"id\": 1," + " \"thing\": \"foo\"}," + " \"id3\": {\"id\": 3," + " \"thing\": \"foo3\"}" + " }" + "}" ); Map<String, Object> expdata2 = createData( "{\"map\": {\"id2\": {\"id\": 2," + " \"thing\": \"foo2\"}" + " }" + "}" ); Map<String, Object> expdata3 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " {\"id\": 3," + " \"thing\": \"foo3\"}" + " ]" + "}" ); Map<String, Object> expdata4 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " null," + " {\"id\": 4," + " \"thing\": \"foo4\"}" + " ]" + "}" ); compareObjectAndInfo(got.get(0), o1, p1, expdata1, refs1, refmap1); compareObjectAndInfo(got.get(1), o1, p1, expdata2, refs1, refmap1); compareObjectAndInfo(got.get(2), o2, p2, expdata3, refs2, refmap2); compareObjectAndInfo(got.get(3), o3, p2, expdata4, refs2, refmap2); // new test for extractor that fails on an array OOB failGetSubset(user, Arrays.asList( new SubObjectIdentifier(oident2, new ObjectPaths( Arrays.asList("/array/3", "/array/0")))), new TypedObjectExtractionException( "Invalid selection: no array element exists at position '3', at: /array/3")); got = ws.getObjectsSubSet(user, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/*/thing"))), new SubObjectIdentifier(oident2, new ObjectPaths( Arrays.asList("/array/[*]/thing"))))); expdata1 = createData( "{\"map\": {\"id1\": {\"thing\": \"foo\"}," + " \"id2\": {\"thing\": \"foo2\"}," + " \"id3\": {\"thing\": \"foo3\"}" + " }" + "}" ); expdata2 = createData( "{\"array\": [{\"thing\": \"foo\"}," + " {\"thing\": \"foo2\"}," + " {\"thing\": \"foo3\"}" + " ]" + "}" ); compareObjectAndInfo(got.get(0), o1, p1, expdata1, refs1, refmap1); compareObjectAndInfo(got.get(1), o2, p2, expdata2, refs2, refmap2); failGetSubset(user, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/id1/id/5")))), new TypedObjectExtractionException( "Invalid selection: the path given specifies fields or elements that do not exist " + "because data at this location is a scalar value (i.e. 
string, integer, float), at: /map/id1/id")); failGetSubset(user2, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/*/thing")))), new InaccessibleObjectException( "Object o1 cannot be accessed: User subUser2 may not read workspace subData")); try { ws.getObjectsSubSet(user2, Arrays.asList(new SubObjectIdentifier( new ObjectIdentifier(wsi, 2), null))); fail("Able to get obj data from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 2 cannot be accessed: User subUser2 may not read workspace subData")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(wsi, 2))); } } @Test public void getReferencingObjects() throws Exception { WorkspaceUser user1 = new WorkspaceUser("refUser"); WorkspaceUser user2 = new WorkspaceUser("refUser2"); WorkspaceIdentifier wsitar1 = new WorkspaceIdentifier("refstarget1"); WorkspaceIdentifier wsitar2 = new WorkspaceIdentifier("refstarget2"); WorkspaceIdentifier wsisrc1 = new WorkspaceIdentifier("refssource1"); WorkspaceIdentifier wsisrc2 = new WorkspaceIdentifier("refssource2"); WorkspaceIdentifier wsisrc2noaccess = new WorkspaceIdentifier("refssource2noaccess"); WorkspaceIdentifier wsisrcdel1 = new WorkspaceIdentifier("refssourcedel1"); WorkspaceIdentifier wsisrc2gl = new WorkspaceIdentifier("refssourcegl"); long wsid = ws.createWorkspace(user1, wsitar1.getName(), false, null, null).getId(); ws.setPermissions(user1, wsitar1, Arrays.asList(user2), Permission.READ); ws.createWorkspace(user2, wsitar2.getName(), false, null, null); ws.setPermissions(user2, wsitar2, Arrays.asList(user1), Permission.READ); ws.createWorkspace(user1, wsisrc1.getName(), false, null, null); ws.createWorkspace(user2, wsisrc2.getName(), false, null, null); ws.setPermissions(user2, wsisrc2, Arrays.asList(user1), Permission.READ); ws.createWorkspace(user2, wsisrc2noaccess.getName(), false, null, null); ws.createWorkspace(user1, wsisrcdel1.getName(), false, null, null); ws.createWorkspace(user2, wsisrc2gl.getName(), true, null, null); TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); Map<String, String> pmeta1 = new HashMap<String, String>(); pmeta1.put("metastuff", "meta"); Map<String, String> pmeta2 = new HashMap<String, String>(); pmeta2.put("meta2", "my hovercraft is full of eels"); WorkspaceUserMetadata meta1 = new WorkspaceUserMetadata(pmeta1); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(pmeta2); Map<String, Object> mtdata = new HashMap<String, Object>(); Provenance p1 = new Provenance(user1); //test objects with no references or no accessible references ws.saveObjects(user1, wsitar1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("norefs"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("deletedref"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadableref"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("deletedprovref"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadableprovref"), mtdata, SAFE_TYPE1, null, p1, false)), getIdFactory()); Map<String, Object> refdata = new HashMap<String, Object>(); refdata.put("refs", Arrays.asList("refstarget1/deletedref")); ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("delrefptr"), refdata, reftype, null, p1, false)), 
getIdFactory()); ws.setObjectsDeleted(user1, Arrays.asList( new ObjectIdentifier(wsisrc1, "delrefptr")), true); refdata.put("refs", Arrays.asList("refstarget1/unreadableref")); ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadrefptr"), refdata, reftype, null, p1, false)), getIdFactory()); ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("deletedprovrefptr"), mtdata, SAFE_TYPE1, null, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/deletedprovref"))), false)), getIdFactory()); ws.setObjectsDeleted(user1, Arrays.asList( new ObjectIdentifier(wsisrc1, "deletedprovrefptr")), true); ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadableprovrefptr"), mtdata, SAFE_TYPE1, null, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/unreadableprovref"))), false)), getIdFactory()); List<Set<ObjectInformation>> mtrefs = new ArrayList<Set<ObjectInformation>>(); mtrefs.add(new HashSet<ObjectInformation>()); for (String name: Arrays.asList("norefs", "deletedref", "unreadableref", "deletedprovref", "unreadableprovref")) { assertThat("ref lists empty", ws.getReferencingObjects(user1, Arrays.asList(new ObjectIdentifier(wsitar1, name))), is(mtrefs)); } ws.saveObjects(user1, wsitar1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk"), mtdata, SAFE_TYPE1, meta1, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk"), mtdata, SAFE_TYPE1, meta2, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("single"), mtdata, SAFE_TYPE1, meta1, p1, false)), getIdFactory()); ws.saveObjects(user2, wsitar2, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk2"), mtdata, SAFE_TYPE1, meta1, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk2"), mtdata, SAFE_TYPE1, meta2, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("single2"), mtdata, SAFE_TYPE1, meta1, p1, false)), getIdFactory()); refdata.put("refs", Arrays.asList("refstarget1/stk/1")); ObjectInformation stdref1 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stdref"), refdata, reftype, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/1"))), false)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget1/stk/2")); ObjectInformation stdref2 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stdref"), refdata, reftype, meta2, new Provenance(user1), false)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget1/stk")); ObjectInformation hiddenref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("hiddenref"), refdata, reftype, meta1, new Provenance(user1), true)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget2/stk2")); @SuppressWarnings("unused") ObjectInformation delref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("delref"), refdata, reftype, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/2"))), true)), getIdFactory()).get(0); ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(wsisrc1, "delref")), true); refdata.put("refs", Arrays.asList("refstarget1/single")); 
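// the pointer objects saved next live in readable, unreadable, deleted, and globally readable workspaces; only those user1 can read should appear in the reference lists below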
ObjectInformation readable = ws.saveObjects(user2, wsisrc2, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("readable"), refdata, reftype, meta2, new Provenance(user2), true)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget2/stk2/2")); @SuppressWarnings("unused") ObjectInformation unreadable = ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadable"), refdata, reftype, meta1, new Provenance(user2), true)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget2/single2/1")); @SuppressWarnings("unused") ObjectInformation wsdeletedreadable1 = ws.saveObjects(user1, wsisrcdel1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("wsdeletedreadable1"), refdata, reftype, meta2, new Provenance(user1), false)), getIdFactory()).get(0); ws.setWorkspaceDeleted(user1, wsisrcdel1, true); refdata.put("refs", Arrays.asList("refstarget2/stk2/1")); ObjectInformation globalrd = ws.saveObjects(user2, wsisrc2gl, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("globalrd"), refdata, reftype, meta1, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/single/1"))), false)), getIdFactory()).get(0); List<ObjectIdentifier> objs = Arrays.asList( new ObjectIdentifier(wsitar1, "stk"), new ObjectIdentifier(wsitar1, "stk", 2), new ObjectIdentifier(wsitar1, "stk", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( oiset(stdref2, hiddenref), oiset(stdref2, hiddenref), oiset(stdref1)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(3, 3, 1))); Set<ObjectInformation> mtoiset = new HashSet<ObjectInformation>(); objs = Arrays.asList( new ObjectIdentifier(wsitar2, "stk2"), new ObjectIdentifier(wsitar2, "stk2", 2), new ObjectIdentifier(wsitar2, "stk2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( mtoiset, mtoiset, oiset(globalrd)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(2, 2, 1))); objs = Arrays.asList( new ObjectIdentifier(wsitar1, "single"), new ObjectIdentifier(wsitar1, "single", 1), new ObjectIdentifier(wsitar2, "single2"), new ObjectIdentifier(wsitar2, "single2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1,objs), is(Arrays.asList( oiset(readable, globalrd), oiset(readable, globalrd), mtoiset, mtoiset))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(2, 2, 1, 1))); ObjectInformation pstdref1 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pstdref"), mtdata, SAFE_TYPE1, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/1"))), false)), getIdFactory()).get(0); ObjectInformation pstdref2 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pstdref"), mtdata, SAFE_TYPE1, meta2, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/2"))), false)), getIdFactory()).get(0); ObjectInformation phiddenref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("phiddenref"), mtdata, SAFE_TYPE1, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk"))), true)), getIdFactory()).get(0); 
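// the p* objects below reference their targets via provenance only; the updated refcounts verify that provenance references are counted the same as data references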
@SuppressWarnings("unused") ObjectInformation pdelref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pdelref"), mtdata, SAFE_TYPE1, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/stk2"))), true)), getIdFactory()).get(0); ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(wsisrc1, "pdelref")), true); ObjectInformation preadable = ws.saveObjects(user2, wsisrc2, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("preadable"), mtdata, SAFE_TYPE1, meta2, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/single"))), true)), getIdFactory()).get(0); @SuppressWarnings("unused") ObjectInformation punreadable = ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("punreadable"), mtdata, SAFE_TYPE1, meta1, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/stk2/2"))), true)), getIdFactory()).get(0); ws.setWorkspaceDeleted(user1, wsisrcdel1, false); @SuppressWarnings("unused") ObjectInformation pwsdeletedreadable1 = ws.saveObjects(user1, wsisrcdel1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pwsdeletedreadable1"), mtdata, SAFE_TYPE1, meta2, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/single2/1"))), false)), getIdFactory()).get(0); ws.setWorkspaceDeleted(user1, wsisrcdel1, true); ObjectInformation pglobalrd = ws.saveObjects(user2, wsisrc2gl, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pglobalrd"), mtdata, SAFE_TYPE1, meta1, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/stk2/1"))), false)), getIdFactory()).get(0); objs = Arrays.asList( new ObjectIdentifier(wsitar1, "stk"), new ObjectIdentifier(wsitar1, "stk", 2), new ObjectIdentifier(wsitar1, "stk", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( oiset(stdref2, hiddenref, pstdref2, phiddenref), oiset(stdref2, hiddenref, pstdref2, phiddenref), oiset(stdref1, pstdref1)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(5, 5, 2))); objs = Arrays.asList( new ObjectIdentifier(wsitar2, "stk2"), new ObjectIdentifier(wsitar2, "stk2", 2), new ObjectIdentifier(wsitar2, "stk2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( mtoiset, mtoiset, oiset(globalrd, pglobalrd)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(4, 4, 2))); objs = Arrays.asList( new ObjectIdentifier(wsitar1, "single"), new ObjectIdentifier(wsitar1, "single", 1), new ObjectIdentifier(wsitar2, "single2"), new ObjectIdentifier(wsitar2, "single2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( oiset(readable, globalrd, preadable), oiset(readable, globalrd, preadable), mtoiset, mtoiset))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(3, 3, 2, 2))); try { ws.getReferencingObjects(user2, Arrays.asList( new ObjectIdentifier(wsisrc1, 1))); fail("Able to get ref obj data from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 1 cannot be accessed: User refUser2 may not read workspace 
refssource1")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(wsisrc1, 1))); } try { ws.getReferencingObjectCounts(user2, Arrays.asList( new ObjectIdentifier(wsisrc1, 1))); fail("Able to get ref obj count from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 1 cannot be accessed: User refUser2 may not read workspace refssource1")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(wsisrc1, 1))); } try { ws.getReferencingObjectCounts(user1, Arrays.asList( new ObjectIdentifier(wsitar1, "single", 2))); fail("Able to get ref obj count for non-existant obj version"); } catch (NoSuchObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("No object with id 7 (name single) and version 2 exists in workspace " + wsid)); ObjectIDResolvedWS resobj = ioe.getResolvedInaccessibleObject(); assertThat("correct ws id in returned oid", resobj.getWorkspaceIdentifier().getID(), is(wsid)); assertThat("correct ws name in returned oid", resobj.getWorkspaceIdentifier().getName(), is(wsitar1.getName())); assertThat("correct objid in returned oid", resobj.getId(), is((Long) null)); assertThat("correct obj name in returned oid", resobj.getName(), is("single")); assertThat("correct obj ver in returned oid", resobj.getVersion(), is(2)); } ws.setGlobalPermission(user2, wsisrc2gl, Permission.NONE); } @Test public void getReferencedObjects() throws Exception { WorkspaceUser user1 = new WorkspaceUser("refedUser"); WorkspaceUser user2 = new WorkspaceUser("refedUser2"); WorkspaceIdentifier wsiacc1 = new WorkspaceIdentifier("refedaccessible"); WorkspaceIdentifier wsiacc2 = new WorkspaceIdentifier("refedaccessible2"); WorkspaceIdentifier wsiun1 = new WorkspaceIdentifier("refedunacc"); WorkspaceIdentifier wsiun2 = new WorkspaceIdentifier("refedunacc2"); WorkspaceIdentifier wsidel = new WorkspaceIdentifier("refeddel"); ws.createWorkspace(user1, wsiacc1.getName(), false, null, null); ws.setPermissions(user1, wsiacc1, Arrays.asList(user2), Permission.WRITE); ws.createWorkspace(user2, wsiacc2.getName(), true, null, null); long wsidun1 = ws.createWorkspace(user2, wsiun1.getName(), false, null, null).getId(); long wsidun2 = ws.createWorkspace(user2, wsiun2.getName(), false, null, null).getId(); ws.createWorkspace(user2, wsidel.getName(), false, null, null); TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); Map<String, String> meta1 = new HashMap<String, String>(); meta1.put("some", "very special metadata"); Map<String, String> meta2 = new HashMap<String, String>(); meta2.put("some", "very special metadata2"); Map<String, String> mtdata = new HashMap<String, String>(); Map<String, Object> data1 = createData( "{\"thing1\": \"whoop whoop\"," + " \"thing2\": \"aroooga\"}"); Map<String, Object> data2 = createData( "{\"thing3\": \"whoop whoop\"," + " \"thing4\": \"aroooga\"}"); ObjectInformation leaf1 = saveObject(user2, wsiun1, meta1, data1, SAFE_TYPE1, "leaf1", new Provenance(user2)); ObjectIdentifier leaf1oi = new ObjectIdentifier(wsiun1, "leaf1"); failGetObjects(user1, Arrays.asList(leaf1oi), new InaccessibleObjectException( "Object leaf1 cannot be accessed: User refedUser may not read workspace refedunacc")); ObjectInformation leaf2 = saveObject(user2, wsiun2, meta2, data2, SAFE_TYPE1, "leaf2", new Provenance(user2)); ObjectIdentifier leaf2oi = new ObjectIdentifier(wsiun2, "leaf2"); 
failGetObjects(user1, Arrays.asList(leaf2oi), new InaccessibleObjectException( "Object leaf2 cannot be accessed: User refedUser may not read workspace refedunacc2")); saveObject(user2, wsiun2, meta2, data2, SAFE_TYPE1, "unlinked", new Provenance(user2)); ObjectIdentifier unlinkedoi = new ObjectIdentifier(wsiun2, "unlinked"); failGetObjects(user1, Arrays.asList(unlinkedoi), new InaccessibleObjectException( "Object unlinked cannot be accessed: User refedUser may not read workspace refedunacc2")); final String leaf1r = "refedunacc/leaf1"; saveObject(user2, wsiacc1, MT_META, makeRefData(leaf1r),reftype, "simpleref", new Provenance(user2)); final String leaf2r = "refedunacc2/leaf2"; saveObject(user2, wsiacc2, MT_META, makeRefData(leaf2r),reftype, "simpleref2", new Provenance(user2)); saveObject(user2, wsiacc1, MT_META, mtdata, SAFE_TYPE1, "provref", new Provenance(user2) .addAction(new ProvenanceAction().withWorkspaceObjects( Arrays.asList(leaf1r)))); saveObject(user2, wsiacc2, MT_META, mtdata, SAFE_TYPE1, "provref2", new Provenance(user2) .addAction(new ProvenanceAction().withWorkspaceObjects( Arrays.asList(leaf2r)))); final HashMap<String, String> mtmap = new HashMap<String, String>(); final LinkedList<String> mtlist = new LinkedList<String>(); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc1, "simpleref"), Arrays.asList(leaf1oi)), leaf1, new Provenance(user2), data1, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc2, "simpleref2"), Arrays.asList(leaf2oi)), leaf2, new Provenance(user2), data2, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc1, "provref"), Arrays.asList(leaf1oi)), leaf1, new Provenance(user2), data1, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc2, "provref2"), Arrays.asList(leaf2oi)), leaf2, new Provenance(user2), data2, mtlist, mtmap); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiacc2, "simpleref2"), Arrays.asList(leaf1oi))), new NoSuchReferenceException( "The object simpleref2 in workspace refedaccessible2 does not contain the reference " + wsidun1 + "/1/1", null, null)); ObjectInformation del1 = saveObject(user2, wsiun1, meta2, makeRefData(leaf1r, leaf2r), reftype, "del1", new Provenance(user2)); ObjectIdentifier del1oi = new ObjectIdentifier(wsiun1, "del1"); final Provenance p = new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList(leaf1r, leaf2r))); ObjectInformation del2 = saveObject(user2, wsiun2, meta1, makeRefData(), reftype, "del2", p); ObjectIdentifier del2oi = new ObjectIdentifier(wsiun2, "del2"); saveObject(user2, wsidel, meta1, makeRefData(leaf2r), reftype, "delws", new Provenance(user2)); ObjectIdentifier delwsoi = new ObjectIdentifier(wsidel, "delws"); saveObject(user2, wsiacc1, MT_META, makeRefData("refedunacc/del1", "refedunacc2/del2"), reftype, "delptr12", new Provenance(user2)); ObjectIdentifier delptr12oi = new ObjectIdentifier(wsiacc1, "delptr12"); saveObject(user2, wsiacc2, MT_META, makeRefData("refedunacc2/del2"), reftype, "delptr2", new Provenance(user2)); ObjectIdentifier delptr2oi = new ObjectIdentifier(wsiacc2, "delptr2"); saveObject(user2, wsiacc2, MT_META, makeRefData("refeddel/delws"), reftype, "delptrws", new Provenance(user2)); ObjectIdentifier delptrwsoi = new ObjectIdentifier(wsiacc2, "delptrws"); ws.setObjectsDeleted(user2, Arrays.asList(del1oi, del2oi), true); ws.setWorkspaceDeleted(user2, wsidel, true); 
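// the intermediate del1/del2 objects and the delws workspace are now deleted, but the leaves should still be reachable via reference chains from accessible pointer objects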
List<WorkspaceObjectData> lwod = ws.getReferencedObjects(user1, Arrays.asList( new ObjectChain(delptr12oi, Arrays.asList(del1oi, leaf1oi)), new ObjectChain(delptr12oi, Arrays.asList(del1oi, leaf2oi)), new ObjectChain(delptr12oi, Arrays.asList(del2oi, leaf1oi)), new ObjectChain(delptrwsoi, Arrays.asList(delwsoi, leaf2oi)), new ObjectChain(delptr12oi, Arrays.asList(del2oi, leaf2oi)), new ObjectChain(delptr2oi, Arrays.asList(del2oi, leaf1oi)), new ObjectChain(delptr2oi, Arrays.asList(del2oi, leaf2oi)) )); assertThat("correct list size", lwod.size(), is(7)); compareObjectAndInfo(lwod.get(0), leaf1, new Provenance(user2), data1, mtlist, mtmap); compareObjectAndInfo(lwod.get(1), leaf2, new Provenance(user2), data2, mtlist, mtmap); compareObjectAndInfo(lwod.get(2), leaf1, new Provenance(user2), data1, mtlist, mtmap); compareObjectAndInfo(lwod.get(3), leaf2, new Provenance(user2), data2, mtlist, mtmap); compareObjectAndInfo(lwod.get(4), leaf2, new Provenance(user2), data2, mtlist, mtmap); compareObjectAndInfo(lwod.get(5), leaf1, new Provenance(user2), data1, mtlist, mtmap); compareObjectAndInfo(lwod.get(6), leaf2, new Provenance(user2), data2, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(delptr12oi, Arrays.asList(del1oi)), del1, new Provenance(user2), makeRefData(wsidun1 + "/1/1", wsidun2 + "/1/1"), Arrays.asList(wsidun1 + "/1/1", wsidun2 + "/1/1"), mtmap); Map<String, String> provmap = new HashMap<String, String>(); provmap.put(leaf1r, wsidun1 + "/1/1"); provmap.put(leaf2r, wsidun2 + "/1/1"); checkReferencedObject(user1, new ObjectChain(delptr12oi, Arrays.asList(del2oi)), del2, p, makeRefData(), mtlist, provmap); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr2oi, Arrays.asList(del1oi, leaf1oi))), new NoSuchReferenceException( "The object delptr2 in workspace refedaccessible2 does not contain the reference " + wsidun1 + "/2/1", null, null)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr12oi, Arrays.asList(del1oi, unlinkedoi))), new NoSuchReferenceException( "The object del1 in workspace refedunacc does not contain the reference " + wsidun2 + "/2/1", null, null)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr12oi, Arrays.asList(del1oi, new ObjectIdentifier(wsiun1, "leaf2")))), new NoSuchObjectException( "No object with name leaf2 exists in workspace " + wsidun1, null, null)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr12oi, Arrays.asList(del1oi, new ObjectIdentifier(wsiun1, "leaf1", 2)))), new NoSuchObjectException( "No object with id 1 (name leaf1) and version 2 exists in workspace " + wsidun1, null, null)); failGetReferencedObjects(user2, new ArrayList<ObjectChain>(), new IllegalArgumentException("No object identifiers provided")); failGetReferencedObjects(user2, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf3"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("No object with name leaf3 exists in workspace " + wsidun1)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(new ObjectIdentifier(new WorkspaceIdentifier("fakefakefake"), "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be accessed: No workspace with name fakefakefake exists")); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be 
accessed: User refedUser may not read workspace refedunacc")); failGetReferencedObjects(null, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be accessed: Anonymous users may not read workspace refedunacc")); ws.setObjectsDeleted(user2, Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")), true); failGetReferencedObjects(user2, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object 1 (name leaf1) in workspace " + wsidun1 + " has been deleted")); ws.setObjectsDeleted(user2, Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")), false); ws.setWorkspaceDeleted(user2, wsiun1, true); failGetReferencedObjects(user2, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be accessed: Workspace refedunacc is deleted")); ws.setGlobalPermission(user2, wsiacc2, Permission.NONE); } @Test public void objectChain() throws Exception { WorkspaceIdentifier wsi = new WorkspaceIdentifier("foo"); ObjectIdentifier oi = new ObjectIdentifier(wsi, "thing"); failCreateObjectChain(null, new ArrayList<ObjectIdentifier>(), new IllegalArgumentException("Neither head nor chain can be null")); failCreateObjectChain(oi, null, new IllegalArgumentException("Neither head nor chain can be null")); failCreateObjectChain(oi, new ArrayList<ObjectIdentifier>(), new IllegalArgumentException("Chain cannot be empty")); failCreateObjectChain(oi, Arrays.asList(oi, null, oi), new IllegalArgumentException("Nulls are not allowed in reference chains")); } @Test public void grantRemoveOwnership() throws Exception { WorkspaceUser user = new WorkspaceUser("foo"); String moduleName = "SharedModule"; ws.requestModuleRegistration(user, moduleName); ws.resolveModuleRegistration(moduleName, true); ws.compileNewTypeSpec(user, "module " + moduleName + " {typedef int MainType;};", Arrays.asList("MainType"), null, null, false, null); ws.releaseTypes(user, moduleName); WorkspaceUser user2 = new WorkspaceUser("bar"); try { ws.compileNewTypeSpec(user2, "module " + moduleName + " {typedef string MainType;};", Collections.<String>emptyList(), null, null, false, null); Assert.fail(); } catch (NoSuchPrivilegeException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("not in list of owners")); } ws.grantModuleOwnership(moduleName, user2.getUser(), false, user, false); ws.compileNewTypeSpec(user2, "module " + moduleName + " {typedef string MainType;};", Collections.<String>emptyList(), null, null, false, null); WorkspaceUser user3 = new WorkspaceUser("baz"); try { ws.grantModuleOwnership(moduleName, user3.getUser(), false, user2, false); Assert.fail(); } catch (NoSuchPrivilegeException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("can not change privileges")); } ws.grantModuleOwnership(moduleName, user2.getUser(), true, user, false); ws.grantModuleOwnership(moduleName, user3.getUser(), false, user2, false); ws.removeModuleOwnership(moduleName, user3.getUser(), user2, false); ws.removeModuleOwnership(moduleName, user2.getUser(), user, false); try { ws.compileNewTypeSpec(user2, "module " + moduleName + " {typedef float MainType;};", Collections.<String>emptyList(), null, null, false, null); Assert.fail(); } catch (NoSuchPrivilegeException ex) { 
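/* user2's ownership was removed above, so compiling the spec must fail with the owners error again */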
Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("not in list of owners")); } } @Test public void removeTypeTest() throws Exception { WorkspaceUser user = new WorkspaceUser("foo"); String moduleName = "MyMod3"; ws.requestModuleRegistration(user, moduleName); ws.resolveModuleRegistration(moduleName, true); ws.compileNewTypeSpec(user, "module " + moduleName + " {" + "typedef structure {string foo; list<int> bar; int baz;} AType; " + "typedef structure {string whooo;} BType;};", Arrays.asList("AType", "BType"), null, null, false, null); ws.compileTypeSpec(user, moduleName, Collections.<String>emptyList(), Arrays.asList("BType"), Collections.<String, Long>emptyMap(), false); List<Long> vers = ws.getModuleVersions(moduleName, user); Collections.sort(vers); Assert.assertEquals(2, vers.size()); Assert.assertEquals(2, ws.getModuleInfo(user, new ModuleDefId(moduleName, vers.get(0))).getTypes().size()); Assert.assertEquals(1, ws.getModuleInfo(user, new ModuleDefId(moduleName, vers.get(1))).getTypes().size()); Assert.assertEquals(Arrays.asList(vers.get(0)), ws.getModuleVersions(new TypeDefId(moduleName + ".BType", "0.1"), user)); ws.releaseTypes(user, moduleName); Assert.assertEquals(1, ws.getModuleVersions(new TypeDefId(moduleName + ".AType"), null).size()); Assert.assertEquals(moduleName + ".AType-1.0", ws.getTypeInfo(moduleName + ".AType", false, null).getTypeDefId()); } @Test public void admin() throws Exception { assertThat("no admins before adding any", ws.getAdmins(), is((Set<WorkspaceUser>) new HashSet<WorkspaceUser>())); ws.addAdmin(new WorkspaceUser("adminguy")); Set<WorkspaceUser> expected = new HashSet<WorkspaceUser>(); expected.add(new WorkspaceUser("adminguy")); assertThat("correct admins", ws.getAdmins(), is(expected)); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy"))); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy2"))); ws.addAdmin(new WorkspaceUser("adminguy2")); expected.add(new WorkspaceUser("adminguy2")); assertThat("correct admins", ws.getAdmins(), is(expected)); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy"))); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy2"))); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy3"))); ws.removeAdmin(new WorkspaceUser("adminguy")); expected.remove(new WorkspaceUser("adminguy")); assertThat("correct admins", ws.getAdmins(), is(expected)); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy"))); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy2"))); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy3"))); } @Test public void getAllWorkspaceOwners() throws Exception { Set<WorkspaceUser> startusers = ws.getAllWorkspaceOwners(); String userprefix = "getAllWorkspaceOwners"; Set<WorkspaceUser> users = new HashSet<WorkspaceUser>(); for (int i = 0; i < 4; i++) { String u = userprefix + i; users.add(new WorkspaceUser(u)); ws.createWorkspace(new WorkspaceUser(u), u + ":" + userprefix, false, null, null); } Set<WorkspaceUser> newusers = ws.getAllWorkspaceOwners(); newusers.removeAll(startusers); assertThat("got correct list of workspace users", newusers, is(users)); } @Test public void sortForMD5() throws Exception { WorkspaceUser user = new WorkspaceUser("md5user"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("sorting"); ws.createWorkspace(user, 
wsi.getIdentifierString(), false, null, null); Map<String, Object> data = new LinkedHashMap<String, Object>(); data.put("g", 7); data.put("d", 4); data.put("a", 1); data.put("e", 5); data.put("b", 2); data.put("f", 6); data.put("c", 3); String expected = "{\"a\":1,\"b\":2,\"c\":3,\"d\":4,\"e\":5,\"f\":6,\"g\":7}"; String md5 = DigestUtils.md5Hex(expected); assertThat("md5 correct", md5, is("f906e268b16cbfa1c302c6bb51a6b784")); JsonNode savedata = MAPPER.valueToTree(data); Provenance p = new Provenance(new WorkspaceUser("kbasetest2")); List<WorkspaceSaveObject> objects = Arrays.asList( new WorkspaceSaveObject(savedata, SAFE_TYPE1, null, p, false)); List<ObjectInformation> objinfo = ws.saveObjects(user, wsi, objects, getIdFactory()); assertThat("workspace calculated md5 correct", objinfo.get(0).getCheckSum(), is(md5)); objinfo = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, 1)), false, false); assertThat("workspace calculated md5 correct", objinfo.get(0).getCheckSum(), is(md5)); } @Test public void maxObjectSize() throws Exception { WorkspaceUser user = new WorkspaceUser("MOSuser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("maxObjectSize"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); data.put("foo", "9012345678"); ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder(oldcfg); ws.setResourceConfig(build.withMaxObjectSize(20).build()); saveObject(user, wsi, null, data, SAFE_TYPE1, "foo", new Provenance(user)); //should work data.put("foo", "90123456789"); failSave(user, wsi, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, new Provenance(user), false)), new IllegalArgumentException( "Object #1 data size 21 exceeds limit of 20")); ws.setResourceConfig(oldcfg); } @Test public void maxReturnedObjectSize() throws Exception { TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); WorkspaceUser user = new WorkspaceUser("MROSuser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("maxReturnedObjectSize"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); data.put("fo", "90"); data.put("ba", "3"); saveObject(user, wsi, null, data, SAFE_TYPE1, "foo", new Provenance(user)); ObjectIdentifier oi1 = new ObjectIdentifier(wsi, "foo", 1); saveObject(user, wsi, null, data, SAFE_TYPE1, "foo2", new Provenance(user)); ObjectIdentifier oi2 = new ObjectIdentifier(wsi, "foo2", 1); List<ObjectIdentifier> oi1l = Arrays.asList(oi1); List<ObjectIdentifier> oi2l = Arrays.asList(oi2); Map<String, Object> refdata = new HashMap<String, Object>(); refdata.put("refs", Arrays.asList(wsi.getName() + "/foo/1")); saveObject(user, wsi, null, refdata, reftype, "ref", new Provenance(user)); refdata.put("refs", Arrays.asList(wsi.getName() + "/foo2/1")); saveObject(user, wsi, null, refdata, reftype, "ref2", new Provenance(user)); ObjectIdentifier ref = new ObjectIdentifier(wsi, "ref", 1); ObjectIdentifier ref2 = new ObjectIdentifier(wsi, "ref2", 1); List<ObjectChain> refchain = Arrays.asList(new ObjectChain(ref, oi1l)); List<ObjectChain> refchain2 = Arrays.asList(new ObjectChain(ref, oi1l), new ObjectChain(ref2, oi2l)); ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder( oldcfg).withMaxObjectSize(1); 
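/* the 1B incoming object size limit is irrelevant for the gets below - only the returned data limits set next should gate reads */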
ws.setResourceConfig(build.withMaxReturnedDataSize(20).build()); List<SubObjectIdentifier> ois1l = Arrays.asList(new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/fo")))); List<SubObjectIdentifier> ois1lmt = Arrays.asList(new SubObjectIdentifier(oi1, new ObjectPaths(new ArrayList<String>()))); successGetObjects(user, oi1l); ws.getObjectsSubSet(user, ois1l); ws.getObjectsSubSet(user, ois1lmt); ws.getReferencedObjects(user, refchain); ws.setResourceConfig(build.withMaxReturnedDataSize(19).build()); String errstr = "Too much data requested from the workspace at once; data requested " + "including potential subsets is %sB which exceeds maximum of %s."; IllegalArgumentException err = new IllegalArgumentException(String.format(errstr, 20, 19)); failGetObjects(user, oi1l, err, true); failGetSubset(user, ois1l, err); failGetSubset(user, ois1lmt, err); failGetReferencedObjects(user, refchain, err); ws.setResourceConfig(build.withMaxReturnedDataSize(40).build()); List<ObjectIdentifier> two = Arrays.asList(oi1, oi2); List<SubObjectIdentifier> ois1l2 = Arrays.asList( new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/fo"))), new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/ba")))); List<SubObjectIdentifier> bothoi = Arrays.asList( new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/fo"))), new SubObjectIdentifier(oi2, new ObjectPaths(Arrays.asList("/ba")))); successGetObjects(user, two); ws.getObjectsSubSet(user, ois1l2); ws.getObjectsSubSet(user, bothoi); ws.getReferencedObjects(user, refchain2); ws.setResourceConfig(build.withMaxReturnedDataSize(39).build()); err = new IllegalArgumentException(String.format(errstr, 40, 39)); failGetObjects(user, two, err, true); failGetSubset(user, ois1l2, err); failGetSubset(user, bothoi, err); failGetReferencedObjects(user, refchain2, err); List<SubObjectIdentifier> all = new LinkedList<SubObjectIdentifier>(); all.addAll(ois1l2); all.addAll(bothoi); ws.setResourceConfig(build.withMaxReturnedDataSize(60).build()); ws.getObjectsSubSet(user, all); ws.setResourceConfig(build.withMaxReturnedDataSize(59).build()); err = new IllegalArgumentException(String.format(errstr, 60, 59)); failGetSubset(user, all, err); ws.setResourceConfig(oldcfg); } @Test public void useFileVsMemoryForData() throws Exception { WorkspaceUser user = new WorkspaceUser("sortfilemem"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("sortFileMem"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data1 = new LinkedHashMap<String, Object>(); data1.put("z", 1); data1.put("y", 2); Provenance p = new Provenance(user); List<WorkspaceSaveObject> objs = new ArrayList<WorkspaceSaveObject>(); objs.add(new WorkspaceSaveObject(data1, SAFE_TYPE1, null, p, false)); final int[] filesCreated = {0}; TempFileListener listener = new TempFileListener() { @Override public void createdTempFile(File f) { filesCreated[0]++; } }; ws.getTempFilesManager().addListener(listener); ws.getTempFilesManager().cleanup(); //these tests don't clean up after each test ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder(oldcfg); //single file stays in memory ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(13).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); assertThat("created no temp files on save", filesCreated[0], is(0)); ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(13).build()); ObjectIdentifier oi = new ObjectIdentifier(wsi, 
1); ws.getObjects(user, Arrays.asList(oi)); assertThat("created no temp files on get", filesCreated[0], is(0)); ws.getObjectsSubSet(user, Arrays.asList(new SubObjectIdentifier(oi, new ObjectPaths(Arrays.asList("z"))))).get(0).getDataAsTokens().destroy(); assertThat("created 1 temp file on get subdata", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); //files go to disk except for small subdata filesCreated[0] = 0; ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(12).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); assertThat("created temp files on save", filesCreated[0], is(2)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(12).build()); oi = new ObjectIdentifier(wsi, 2); ws.getObjects(user, Arrays.asList(oi)).get(0).getDataAsTokens().destroy(); assertThat("created 1 temp file on get", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.getObjectsSubSet(user, Arrays.asList(new SubObjectIdentifier(oi, new ObjectPaths(Arrays.asList("z"))))).get(0).getDataAsTokens().destroy(); assertThat("created 1 temp file on get subdata part object", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.getObjectsSubSet(user, Arrays.asList(new SubObjectIdentifier(oi, new ObjectPaths(Arrays.asList("z", "y"))))).get(0).getDataAsTokens().destroy(); assertThat("created 2 temp files on get subdata full object", filesCreated[0], is(2)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); // test with multiple objects Map<String, Object> data2 = new LinkedHashMap<String, Object>(); data2.put("w", 1); data2.put("f", 2); //already sorted so no temp files will be created Map<String, Object> data3 = new LinkedHashMap<String, Object>(); data3.put("x", 1); data3.put("z", 2); objs.add(new WorkspaceSaveObject(data2, SAFE_TYPE1, null, p, false)); objs.add(new WorkspaceSaveObject(data3, SAFE_TYPE1, null, p, false)); //multiple objects in memory filesCreated[0] = 0; ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(39).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); assertThat("created no temp files on save", filesCreated[0], is(0)); ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(39).build()); List<ObjectIdentifier> ois = Arrays.asList(new ObjectIdentifier(wsi, 3), new ObjectIdentifier(wsi, 4), new ObjectIdentifier(wsi, 5)); for (WorkspaceObjectData wod: ws.getObjects(user, ois)) { wod.getDataAsTokens().destroy(); } assertThat("created no temp files on get", filesCreated[0], is(0)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); //multiple objects to file ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(38).build()); filesCreated[0] = 0; ws.saveObjects(user, wsi, objs, getIdFactory()); //two files per data - 1 for relabeling, 1 for sort assertThat("created temp files on save", filesCreated[0], is(4)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(38).build()); for (WorkspaceObjectData wod: ws.getObjects(user, ois)) { wod.getDataAsTokens().destroy(); } assertThat("created 1 temp file on get", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; 
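/* each object serializes to 13B, so a 25B returned data budget holds one object in memory and the other two should spill to temp files */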
ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(25).build()); for (WorkspaceObjectData wod: ws.getObjects(user, ois)) { wod.getDataAsTokens().destroy(); } assertThat("created 2 temp files on get", filesCreated[0], is(2)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); ws.getTempFilesManager().removeListener(listener); ws.setResourceConfig(oldcfg); } @Test public void storedDataIsSorted() throws Exception { WorkspaceUser user = new WorkspaceUser("dataIsSorted"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("dataissorted"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data1 = new LinkedHashMap<String, Object>(); data1.put("z", 1); data1.put("y", 2); String expected = "{\"y\":2,\"z\":1}"; Provenance p = new Provenance(user); List<WorkspaceSaveObject> objs = new ArrayList<WorkspaceSaveObject>(); objs.add(new WorkspaceSaveObject(data1, SAFE_TYPE1, null, p, false)); ws.saveObjects(user, wsi, objs, getIdFactory()); WorkspaceObjectData o = ws.getObjects( user, Arrays.asList(new ObjectIdentifier(wsi, 1))).get(0); String data = IOUtils.toString(o.getDataAsTokens().getJSON()); assertThat("data is sorted", data, is(expected)); assertThat("data marked as sorted", o.getDataAsTokens().isSorted(), is(true)); } @Test public void exceedSortMemory() throws Exception { WorkspaceUser user = new WorkspaceUser("exceedSortMem"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("exceedsortmem"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Provenance p = new Provenance(user); List<WorkspaceSaveObject> objs = new ArrayList<WorkspaceSaveObject>(); String safejson = "{\"z\":\"a\"}"; String json = "{\"z\":\"a\",\"b\":\"d\"}"; objs.add(new WorkspaceSaveObject(new JsonTokenStream(safejson), SAFE_TYPE1, null, p, false)); objs.add(new WorkspaceSaveObject(new JsonTokenStream(json), SAFE_TYPE1, null, p, false)); ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder(oldcfg) .withMaxIncomingDataMemoryUsage(1); int maxmem = 8 + 64 + 8 + 64; ws.setResourceConfig(build.withMaxRelabelAndSortMemoryUsage(maxmem).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); ws.setResourceConfig(build.withMaxRelabelAndSortMemoryUsage(maxmem - 1).build()); try { ws.saveObjects(user, wsi, objs, getIdFactory()); fail("sorted w/ too little mem"); } catch (TypedObjectValidationException tove) { assertThat("got correct exception", tove.getMessage(), is("Object #2: Memory necessary for sorting map keys exceeds the limit " + (maxmem - 1) + " bytes at /")); } ws.setResourceConfig(oldcfg); } }
src/us/kbase/workspace/test/workspace/WorkspaceTest.java
package us.kbase.workspace.test.workspace; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.io.StringReader; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import junit.framework.Assert; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.IOUtils; import org.junit.Test; import us.kbase.common.service.JsonTokenStream; import us.kbase.typedobj.core.AbsoluteTypeDefId; import us.kbase.typedobj.core.ObjectPaths; import us.kbase.typedobj.core.TempFileListener; import us.kbase.typedobj.core.TypeDefId; import us.kbase.typedobj.core.TypeDefName; import us.kbase.typedobj.db.FuncDetailedInfo; import us.kbase.typedobj.db.ModuleDefId; import us.kbase.typedobj.db.TypeDetailedInfo; import us.kbase.typedobj.exceptions.NoSuchFuncException; import us.kbase.typedobj.exceptions.NoSuchModuleException; import us.kbase.typedobj.exceptions.NoSuchPrivilegeException; import us.kbase.typedobj.exceptions.NoSuchTypeException; import us.kbase.typedobj.exceptions.TypedObjectExtractionException; import us.kbase.typedobj.exceptions.TypedObjectValidationException; import us.kbase.typedobj.idref.IdReferenceHandlerSetFactory; import us.kbase.typedobj.idref.IdReferenceType; import us.kbase.workspace.database.AllUsers; import us.kbase.workspace.database.ListObjectsParameters; import us.kbase.workspace.database.ModuleInfo; import us.kbase.workspace.database.ObjectChain; import us.kbase.workspace.database.ObjectIDNoWSNoVer; import us.kbase.workspace.database.ObjectIDResolvedWS; import us.kbase.workspace.database.ObjectIdentifier; import us.kbase.workspace.database.ObjectInformation; import us.kbase.workspace.database.Permission; import us.kbase.workspace.database.Provenance; import us.kbase.workspace.database.Provenance.ExternalData; import us.kbase.workspace.database.Reference; import us.kbase.workspace.database.ResourceUsageConfigurationBuilder; import us.kbase.workspace.database.UncheckedUserMetadata; import us.kbase.workspace.database.WorkspaceSaveObject; import us.kbase.workspace.database.Provenance.ProvenanceAction; import us.kbase.workspace.database.ResourceUsageConfigurationBuilder.ResourceUsageConfiguration; import us.kbase.workspace.database.SubObjectIdentifier; import us.kbase.workspace.database.User; import us.kbase.workspace.database.WorkspaceIdentifier; import us.kbase.workspace.database.WorkspaceInformation; import us.kbase.workspace.database.WorkspaceObjectData; import us.kbase.workspace.database.WorkspaceObjectInformation; import us.kbase.workspace.database.WorkspaceUser; import us.kbase.workspace.database.WorkspaceUserMetadata; import us.kbase.workspace.database.WorkspaceUserMetadata.MetadataSizeException; import us.kbase.workspace.database.exceptions.InaccessibleObjectException; import us.kbase.workspace.database.exceptions.NoSuchObjectException; import us.kbase.workspace.database.exceptions.NoSuchReferenceException; import us.kbase.workspace.database.exceptions.NoSuchWorkspaceException; import 
us.kbase.workspace.database.exceptions.PreExistingWorkspaceException; import us.kbase.workspace.exceptions.WorkspaceAuthorizationException; import us.kbase.workspace.test.kbase.JSONRPCLayerTester; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; public class WorkspaceTest extends WorkspaceTester { public WorkspaceTest(String config, String backend, Integer maxMemoryUsePerCall) throws Exception { super(config, backend, maxMemoryUsePerCall); } private static final WorkspaceIdentifier lockWS = new WorkspaceIdentifier("lock"); @Test public void workspaceDescription() throws Exception { WorkspaceInformation ltinfo = ws.createWorkspace(SOMEUSER, "lt", false, LONG_TEXT, null); WorkspaceInformation ltpinfo = ws.createWorkspace(SOMEUSER, "ltp", false, LONG_TEXT_PART, null); WorkspaceInformation ltninfo = ws.createWorkspace(SOMEUSER, "ltn", false, null, null); String desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("lt")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT.substring(0, 1000))); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltp")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT_PART)); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltn")); assertNull("Workspace description incorrect", desc); ws.setWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("lt"), LONG_TEXT_PART); ws.setWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltp"), null); ws.setWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltn"), LONG_TEXT); WorkspaceInformation ltinfo2 = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier("lt")); WorkspaceInformation ltpinfo2 = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier("ltp")); WorkspaceInformation ltninfo2 = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier("ltn")); assertTrue("date updated on set ws desc", ltinfo2.getModDate().after(ltinfo.getModDate())); assertTrue("date updated on set ws desc", ltpinfo2.getModDate().after(ltpinfo.getModDate())); assertTrue("date updated on set ws desc", ltninfo2.getModDate().after(ltninfo.getModDate())); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("lt")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT_PART)); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltp")); assertNull("Workspace description incorrect", desc); desc = ws.getWorkspaceDescription(SOMEUSER, new WorkspaceIdentifier("ltn")); assertThat("Workspace description incorrect", desc, is(LONG_TEXT.substring(0, 1000))); WorkspaceIdentifier wsi = new WorkspaceIdentifier("lt"); failSetWSDesc(AUSER, wsi, "foo", new WorkspaceAuthorizationException( "User a may not set description on workspace lt")); failSetWSDesc(null, wsi, "foo", new WorkspaceAuthorizationException( "Anonymous users may not set description on workspace lt")); ws.setPermissions(SOMEUSER, wsi, Arrays.asList(AUSER), Permission.WRITE); failSetWSDesc(AUSER, wsi, "foo", new WorkspaceAuthorizationException( "User a may not set description on workspace lt")); ws.setPermissions(SOMEUSER, wsi, Arrays.asList(AUSER), Permission.ADMIN); ws.setWorkspaceDescription(AUSER, wsi, "wooga"); assertThat("ws desc ok", ws.getWorkspaceDescription(SOMEUSER, wsi), is("wooga")); ws.setWorkspaceDeleted(SOMEUSER, wsi, true); failSetWSDesc(SOMEUSER, wsi, "foo", new NoSuchWorkspaceException( "Workspace lt is deleted", wsi)); ws.setWorkspaceDeleted(SOMEUSER, wsi, false); 
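/* a name that never existed fails with NoSuchWorkspaceException rather than an authorization error */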
failSetWSDesc(SOMEUSER, new WorkspaceIdentifier("ltfake"), "foo", new NoSuchWorkspaceException( "No workspace with name ltfake exists", wsi)); try { ws.getWorkspaceDescription(BUSER, wsi); fail("Got ws desc w/o read perms"); } catch (WorkspaceAuthorizationException e) { assertThat("exception message ok", e.getLocalizedMessage(), is("User b may not read workspace lt")); } for (Permission p: Permission.values()) { if (p.compareTo(Permission.NONE) <= 0 || p.compareTo(Permission.OWNER) >= 0) { continue; } ws.setPermissions(SOMEUSER, wsi, Arrays.asList(BUSER), p); ws.getWorkspaceDescription(BUSER, wsi); //will fail if perms are wrong } ws.lockWorkspace(SOMEUSER, wsi); failSetWSDesc(SOMEUSER, wsi, "foo", new WorkspaceAuthorizationException( "The workspace with id " + ltinfo.getId() + ", name lt, is locked and may not be modified")); } @Test public void createWorkspaceAndGetInfo() throws Exception { String wsname = "foo_.-bar"; WorkspaceInformation info = ws.createWorkspace(SOMEUSER, wsname, false, "eeswaffertheen", null); checkWSInfo(info, SOMEUSER, wsname, 0, Permission.OWNER, false, "unlocked", MT_META); long id = info.getId(); WorkspaceIdentifier wsi = new WorkspaceIdentifier(id); Date moddate = info.getModDate(); info = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier(id)); checkWSInfo(info, SOMEUSER, wsname, 0, Permission.OWNER, false, id, moddate, "unlocked", MT_META); info = ws.getWorkspaceInformation(SOMEUSER, new WorkspaceIdentifier(wsname)); checkWSInfo(info, SOMEUSER, wsname, 0, Permission.OWNER, false, id, moddate, "unlocked", MT_META); Map<String, String> meta = new HashMap<String, String>(); meta.put("foo", "bar"); meta.put("baz", "bash"); WorkspaceInformation info2 = ws.createWorkspace(SOMEUSER, "foo2", true, "eeswaffertheen2", new WorkspaceUserMetadata(meta)); checkWSInfo(info2, SOMEUSER, "foo2", 0, Permission.OWNER, true, "unlocked", meta); checkWSInfo(new WorkspaceIdentifier("foo2"), SOMEUSER, "foo2", 0, Permission.OWNER, true, info2.getId(), info2.getModDate(), "unlocked", meta); try { ws.getWorkspaceInformation(BUSER, wsi); fail("Got metadata w/o read perms"); } catch (WorkspaceAuthorizationException e) { assertThat("exception message ok", e.getLocalizedMessage(), is("User b may not read workspace " + id)); } for (Permission p: Permission.values()) { if (p.compareTo(Permission.NONE) <= 0 || p.compareTo(Permission.OWNER) >= 0) { continue; } ws.setPermissions(SOMEUSER, wsi, Arrays.asList(BUSER), p); ws.getWorkspaceInformation(BUSER, wsi); //will fail if perms are wrong } WorkspaceUser anotheruser = new WorkspaceUser("anotherfnuser"); info = ws.createWorkspace(anotheruser, "anotherfnuser:MrT", true, "Ipitythefoolthatdon'teatMrTbreakfastcereal", null); checkWSInfo(info, anotheruser, "anotherfnuser:MrT", 0, Permission.OWNER, true, "unlocked", MT_META); id = info.getId(); moddate = info.getModDate(); info = ws.getWorkspaceInformation(anotheruser, new WorkspaceIdentifier(id)); checkWSInfo(info, anotheruser, "anotherfnuser:MrT", 0, Permission.OWNER, true, id, moddate, "unlocked", MT_META); info = ws.getWorkspaceInformation(anotheruser, new WorkspaceIdentifier("anotherfnuser:MrT")); checkWSInfo(info, anotheruser, "anotherfnuser:MrT", 0, Permission.OWNER, true, id, moddate, "unlocked", MT_META); //TODO BF these tests should be in the metadata class unit tests /* Map<String, String> bigmeta = new HashMap<String, String>(); for (int i = 0; i < 141; i++) { bigmeta.put("thing" + i, TEXT100); } ws.createWorkspace(SOMEUSER, "foo3", false, "eeswaffertheen", new 
WorkspaceUserMetadata(bigmeta)); bigmeta.put("thing", TEXT100); try { ws.createWorkspace(SOMEUSER, "foo4", false, "eeswaffertheen", bigmeta); fail("created ws with > 16kb metadata"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Metadata size of 16076 is > 16000 bytes")); }*/ ws.setGlobalPermission(anotheruser, new WorkspaceIdentifier("anotherfnuser:MrT"), Permission.NONE); ws.setGlobalPermission(SOMEUSER, new WorkspaceIdentifier("foo2"), Permission.NONE); } @Test public void workspaceMetadata() throws Exception { WorkspaceUser user = new WorkspaceUser("blahblah"); WorkspaceUser user2 = new WorkspaceUser("blahblah2"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("workspaceMetadata"); WorkspaceIdentifier wsiNo = new WorkspaceIdentifier("workspaceNoMetadata"); WorkspaceIdentifier wsiNo2 = new WorkspaceIdentifier("workspaceNoMetadata2"); Map<String, String> meta = new HashMap<String, String>(); meta.put("foo", "bar"); meta.put("foo2", "bar2"); meta.put("some", "meta"); WorkspaceInformation info = ws.createWorkspace(user, wsi.getName(), false, null, new WorkspaceUserMetadata(meta)); ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.ADMIN); checkWSInfo(info, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), info.getModDate(), "unlocked", meta); checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), info.getModDate(), "unlocked", meta); WorkspaceInformation infoNo = ws.createWorkspace(user, wsiNo.getName(), false, null, null); checkWSInfo(infoNo, user, wsiNo.getName(), 0, Permission.OWNER, false, infoNo.getId(), infoNo.getModDate(), "unlocked", MT_META); checkWSInfo(wsiNo, user, wsiNo.getName(), 0, Permission.OWNER, false, infoNo.getId(), infoNo.getModDate(), "unlocked", MT_META); WorkspaceInformation infoNo2 = ws.createWorkspace(user, wsiNo2.getName(), false, null, null); meta.put("foo2", "bar3"); //replace Map<String, String> putmeta = new HashMap<String, String>(); putmeta.put("foo2", "bar3"); ws.setWorkspaceMetadata(user, wsi, new WorkspaceUserMetadata(putmeta)); Date d1 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); meta.put("foo3", "bar4"); //new putmeta.clear(); putmeta.put("foo3", "bar4"); ws.setWorkspaceMetadata(user, wsi, new WorkspaceUserMetadata(putmeta)); Date d2 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); putmeta.clear(); putmeta.put("foo3", "bar5"); //replace putmeta.put("some.garbage", "with.dots"); //new putmeta.put("foo", "whoa this is new"); //replace putmeta.put("no, this part is new", "prunker"); //new meta.put("foo3", "bar5"); meta.put("some.garbage", "with.dots"); meta.put("foo", "whoa this is new"); meta.put("no, this part is new", "prunker"); ws.setWorkspaceMetadata(user, wsi, new WorkspaceUserMetadata(putmeta)); Date d3 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); Map<String, String> newmeta = new HashMap<String, String>(); newmeta.put("new", "meta"); ws.setWorkspaceMetadata(user, wsiNo, new WorkspaceUserMetadata(newmeta)); Date nod1 = checkWSInfo(wsiNo, user, wsiNo.getName(), 0, Permission.OWNER, false, infoNo.getId(), "unlocked", newmeta); assertDatesAscending(infoNo.getModDate(), nod1); meta.remove("foo2"); ws.removeWorkspaceMetadata(user, wsi, "foo2"); Date d4 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); meta.remove("some"); 
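/* user2 was granted ADMIN above, so a non-owner admin can also remove metadata */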
ws.removeWorkspaceMetadata(user2, wsi, "some"); Date d5 = checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), "unlocked", meta); ws.removeWorkspaceMetadata(user, wsi, "fake"); //no effect checkWSInfo(wsi, user, wsi.getName(), 0, Permission.OWNER, false, info.getId(), d5, "unlocked", meta); assertDatesAscending(info.getModDate(), d1, d2, d3, d4, d5); checkWSInfo(wsiNo2, user, wsiNo2.getName(), 0, Permission.OWNER, false, infoNo2.getId(), infoNo2.getModDate(), "unlocked", MT_META); ws.removeWorkspaceMetadata(user, wsiNo2, "somekey"); //should do nothing checkWSInfo(wsiNo2, user, wsiNo2.getName(), 0, Permission.OWNER, false, infoNo2.getId(), infoNo2.getModDate(), "unlocked", MT_META); ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.WRITE); failWSMeta(user2, wsi, "foo", "val", new WorkspaceAuthorizationException( "User blahblah2 may not alter metadata for workspace workspaceMetadata")); failWSMeta(null, wsi, "foo", "val", new WorkspaceAuthorizationException( "Anonymous users may not alter metadata for workspace workspaceMetadata")); failWSMeta(user2, new WorkspaceIdentifier("thisiswayfake"), "foo", "val", new NoSuchWorkspaceException( "No workspace with name thisiswayfake exists", wsi)); ws.setWorkspaceDeleted(user, wsi, true); failWSMeta(user, wsi, "foo", "val", new NoSuchWorkspaceException( "Workspace workspaceMetadata is deleted", wsi)); ws.setWorkspaceDeleted(user, wsi, false); putmeta.clear(); for (int i = 0; i < 147; i++) { putmeta.put("" + i, TEXT100); } ws.createWorkspace(user, "wsmetafake", false, null, new WorkspaceUserMetadata(putmeta)); //should work failWSSetMeta(user, wsi, putmeta, new IllegalArgumentException( "Updated metadata exceeds allowed size of 16000B")); ws.setWorkspaceMetadata(user, wsiNo, new WorkspaceUserMetadata(putmeta)); //should work putmeta.put("148", TEXT100); failWSSetMeta(user, wsiNo2, putmeta, new MetadataSizeException( "Metadata exceeds maximum of 16000B")); failWSSetMeta(user, wsi, null, new IllegalArgumentException( "Metadata cannot be null or empty")); failWSSetMeta(user, wsi, MT_META, new IllegalArgumentException( "Metadata cannot be null or empty")); } @Test public void createWorkspaceAndWorkspaceIdentifierWithBadInput() throws Exception { class TestRig { public final WorkspaceUser user; public final String wsname; public final String excep; public TestRig(WorkspaceUser user, String wsname, String exception) { this.user = user; this.wsname = wsname; this.excep = exception; } } WorkspaceUser crap = new WorkspaceUser("afaeaafe"); List<TestRig> userWS = new ArrayList<TestRig>(); //test a few funny chars in the ws name userWS.add(new TestRig(crap, "afe_aff*afea", "Illegal character in workspace name afe_aff*afea: *")); userWS.add(new TestRig(crap, "afe_aff%afea", "Illegal character in workspace name afe_aff%afea: %")); userWS.add(new TestRig(crap, "afeaff/af*ea", "Illegal character in workspace name afeaff/af*ea: /")); userWS.add(new TestRig(crap, "af?eaff*afea", "Illegal character in workspace name af?eaff*afea: ?")); userWS.add(new TestRig(crap, "64", "Workspace names cannot be integers: 64")); //check missing ws name userWS.add(new TestRig(crap, null, "Workspace name cannot be null or the empty string")); userWS.add(new TestRig(crap, "", "Workspace name cannot be null or the empty string")); //check long names userWS.add(new TestRig(crap, TEXT256, "Workspace name exceeds the maximum length of 255")); //check missing user and/or workspace name in compound name userWS.add(new TestRig(crap, ":", "Workspace name 
missing from :")); userWS.add(new TestRig(crap, "foo:", "Workspace name missing from foo:")); userWS.add(new TestRig(crap, ":foo", "User name missing from :foo")); //check multiple delims userWS.add(new TestRig(crap, "foo:a:foo", "Workspace name foo:a:foo may only contain one : delimiter")); userWS.add(new TestRig(crap, "foo::foo", "Workspace name foo::foo may only contain one : delimiter")); for (TestRig testdata: userWS) { String wksps = testdata.wsname; try { new WorkspaceIdentifier(wksps); fail(String.format("able to create workspace identifier with illegal input ws %s", wksps)); } catch (IllegalArgumentException e) { assertThat("incorrect exception message", e.getLocalizedMessage(), is(testdata.excep)); } } //check missing user userWS.add(new TestRig(null, "foo", "user cannot be null")); //user must match prefix userWS.add(new TestRig(SOMEUSER, "notauser:foo", "Workspace name notauser:foo must only contain the user name " + SOMEUSER.getUser() + " prior to the : delimiter")); //no ints userWS.add(new TestRig(new WorkspaceUser("foo"), "foo:64", "Workspace names cannot be integers: foo:64")); for (TestRig testdata: userWS) { WorkspaceUser user = testdata.user; String wksps = testdata.wsname; try { ws.createWorkspace(user, wksps, false, "iswaffertheen", null); fail(String.format("able to create workspace with illegal input user: %s ws %s", user, wksps)); } catch (IllegalArgumentException e) { assertThat("incorrect exception message", e.getLocalizedMessage(), is(testdata.excep)); } try { new WorkspaceIdentifier(wksps, user); fail(String.format("able to create workspace identifier with illegal input user: %s ws %s", user, wksps)); } catch (IllegalArgumentException e) { assertThat("incorrect exception message", e.getLocalizedMessage(), is(testdata.excep)); } } } @Test public void preExistingWorkspace() throws Exception { ws.createWorkspace(AUSER, "preexist", false, null, null); failCreateWorkspace(BUSER, "preexist", false, null, null, new PreExistingWorkspaceException("Workspace name preexist is already in use")); ws.setWorkspaceDeleted(AUSER, new WorkspaceIdentifier("preexist"), true); failCreateWorkspace(BUSER, "preexist", false, null, null, new PreExistingWorkspaceException("Workspace name preexist is already in use")); failCreateWorkspace(AUSER, "preexist", false, null, null, new PreExistingWorkspaceException( "Workspace name preexist is already in use by a deleted workspace")); } @Test public void createIllegalUser() throws Exception { try { new WorkspaceUser("*"); fail("able to create user with illegal character"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Illegal character in user name *: *")); } try { new WorkspaceUser(null); fail("able to create user with null"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Username cannot be null or the empty string")); } try { new WorkspaceUser(""); fail("able to create user with empty string"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Username cannot be null or the empty string")); } try { new WorkspaceUser(TEXT101); fail("able to create user with long string"); } catch (IllegalArgumentException e) { assertThat("exception message correct", e.getLocalizedMessage(), is("Username exceeds the maximum length of 100")); } try { new AllUsers('$'); fail("able to create AllUser with illegal char"); } catch (IllegalArgumentException e) { assertThat("exception 
message correct", e.getLocalizedMessage(), is("Disallowed character: $")); } } @Test public void setWorkspaceOwner() throws Exception { WorkspaceUser u1 = new WorkspaceUser("foo"); WorkspaceUser u2 = new WorkspaceUser("bar"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("wsfoo"); ws.createWorkspace(u1, wsi.getName(), false, null, null); Map<String, String> mt = new HashMap<String, String>(); //basic test WorkspaceInformation wsinfo = ws.setWorkspaceOwner(u1, wsi, u2, null, false); checkWSInfo(wsinfo, u2, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); Map<User, Permission> pexp = new HashMap<User, Permission>(); pexp.put(u1, Permission.ADMIN); pexp.put(u2, Permission.OWNER); assertThat("permissions correct", ws.getPermissions( u2, Arrays.asList(wsi)).get(0), is (pexp)); failSetWorkspaceOwner(null, wsi, u2, null, true, new IllegalArgumentException("bar already owns workspace wsfoo")); failSetWorkspaceOwner(u2, wsi, u2, null, false, new IllegalArgumentException("bar already owns workspace wsfoo")); failSetWorkspaceOwner(null, wsi, null, null, true, new NullPointerException("newUser cannot be null")); failSetWorkspaceOwner(u2, wsi, null, null, false, new NullPointerException("newUser cannot be null")); failSetWorkspaceOwner(null, null, u1, null, true, new NullPointerException("wsi cannot be null")); failSetWorkspaceOwner(u2, null, u1, null, false, new NullPointerException("wsi cannot be null")); WorkspaceIdentifier fake = new WorkspaceIdentifier("wsfoofake"); failSetWorkspaceOwner(null, fake, u2, null, true, new NoSuchWorkspaceException("No workspace with name wsfoofake exists", fake)); failSetWorkspaceOwner(u2, fake, u2, null, false, new NoSuchWorkspaceException("No workspace with name wsfoofake exists", fake)); failSetWorkspaceOwner(null, wsi, u1, null, false, new WorkspaceAuthorizationException("Anonymous users may not change the owner of workspace wsfoo")); failSetWorkspaceOwner(u1, wsi, u1, null, false, new WorkspaceAuthorizationException("User foo may not change the owner of workspace wsfoo")); //test as admin wsinfo = ws.setWorkspaceOwner(null, wsi, u1, null, true); checkWSInfo(wsinfo, u1, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); pexp.put(u1, Permission.OWNER); pexp.put(u2, Permission.ADMIN); assertThat("permissions correct", ws.getPermissions( u2, Arrays.asList(wsi)).get(0), is (pexp)); //test basic name change wsinfo = ws.setWorkspaceOwner(u1, wsi, u2, "wsfoonew", false); checkWSInfo(wsinfo, u2, "wsfoonew", 0L, Permission.OWNER, false, "unlocked", mt); wsi = new WorkspaceIdentifier("wsfoonew"); //illegal name change to invalid user failSetWorkspaceOwner(u2, wsi, u1, "bar:wsfoo", false, new IllegalArgumentException("Workspace name bar:wsfoo must only contain the user name foo prior to the : delimiter")); failSetWorkspaceOwner(null, wsi, u1, "bar:wsfoo", true, new IllegalArgumentException("Workspace name bar:wsfoo must only contain the user name foo prior to the : delimiter")); //test auto rename of workspace ws.renameWorkspace(u2, wsi, "bar:wsfoo"); wsi = new WorkspaceIdentifier("bar:wsfoo"); wsinfo = ws.setWorkspaceOwner(u2, wsi, u1, null, false); wsi = new WorkspaceIdentifier("foo:wsfoo"); checkWSInfo(wsinfo, u1, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); //test manual rename of workspace wsinfo = ws.setWorkspaceOwner(u1, wsi, u2, "bar:wsfoo", false); wsi = new WorkspaceIdentifier("bar:wsfoo"); checkWSInfo(wsinfo, u2, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); //test rename to preexisting workspace 
ws.createWorkspace(u1, "foo:wsfoo2", false, null, null); failSetWorkspaceOwner(u2, wsi, u1, "foo:wsfoo2", false, new IllegalArgumentException("There is already a workspace named foo:wsfoo2")); failSetWorkspaceOwner(null, wsi, u1, "foo:wsfoo2", true, new IllegalArgumentException("There is already a workspace named foo:wsfoo2")); //test rename with same name ws.renameWorkspace(u2, wsi, "wsfoo"); wsi = new WorkspaceIdentifier("wsfoo"); wsinfo = ws.setWorkspaceOwner(u2, wsi, u1, "wsfoo", false); checkWSInfo(wsinfo, u1, wsi.getName(), 0L, Permission.OWNER, false, "unlocked", mt); } private void failSetWorkspaceOwner(WorkspaceUser user, WorkspaceIdentifier wsi, WorkspaceUser newuser, String name, boolean asAdmin, Exception expected) throws Exception { try { ws.setWorkspaceOwner(user, wsi, newuser, name, asAdmin); fail("expected set owner to fail"); } catch (Exception got) { assertThat("correct exception", got.getLocalizedMessage(), is(expected.getLocalizedMessage())); assertThat("correct exception type", got, is(expected.getClass())); } } @Test public void permissionsBulk() throws Exception { /* This test was added after the getPermissions method was converted * to take a list of workspaces rather than a single workspace. * Hence it mostly tests the aspects of the method dealing with * multiple workspaces - the prior tests, which exercise the same * method, test the remainder of the functionality. */ WorkspaceIdentifier wiow = new WorkspaceIdentifier("permmass-owner"); WorkspaceIdentifier wiad = new WorkspaceIdentifier("permmass-admin"); WorkspaceIdentifier wiwr = new WorkspaceIdentifier("permmass-write"); WorkspaceIdentifier wird = new WorkspaceIdentifier("permmass-read"); WorkspaceIdentifier wigr = new WorkspaceIdentifier("permmass-globalread"); WorkspaceIdentifier wino = new WorkspaceIdentifier("permmass-none"); ws.createWorkspace(AUSER, wiow.getName(), false, null, null).getId(); ws.createWorkspace(BUSER, wiad.getName(), false, null, null).getId(); ws.createWorkspace(BUSER, wiwr.getName(), false, null, null).getId(); ws.createWorkspace(CUSER, wird.getName(), false, null, null).getId(); ws.createWorkspace(CUSER, wigr.getName(), false, null, null).getId(); ws.createWorkspace(CUSER, wino.getName(), false, null, null).getId(); ws.setPermissions(BUSER, wiad, Arrays.asList(AUSER), Permission.ADMIN); ws.setPermissions(BUSER, wiwr, Arrays.asList(AUSER), Permission.WRITE); ws.setPermissions(CUSER, wird, Arrays.asList(AUSER), Permission.READ); ws.setGlobalPermission(CUSER, wigr, Permission.READ); List<WorkspaceIdentifier> wsis = new LinkedList<WorkspaceIdentifier>( Arrays.asList(wiow, wiad, wiwr, wird, wigr, wino)); Map<User, Permission> e1 = new HashMap<User, Permission>(); e1.put(AUSER, Permission.OWNER); Map<User, Permission> e2 = new HashMap<User, Permission>(); e2.put(AUSER, Permission.ADMIN); e2.put(BUSER, Permission.OWNER); Map<User, Permission> e3 = new HashMap<User, Permission>(); e3.put(AUSER, Permission.WRITE); e3.put(BUSER, Permission.OWNER); Map<User, Permission> e4 = new HashMap<User, Permission>(); e4.put(AUSER, Permission.READ); Map<User, Permission> e5 = new HashMap<User, Permission>(); e5.put(AUSER, Permission.NONE); e5.put(STARUSER, Permission.READ); Map<User, Permission> e6 = new HashMap<User, Permission>(); e6.put(AUSER, Permission.NONE); List<Map<User, Permission>> exp = Arrays.asList(e1, e2, e3, e4, e5, e6); List<Map<User, Permission>> got = ws.getPermissions(AUSER, wsis); assertThat("got correct mass permissions", got, is(exp)); ws.setGlobalPermission(CUSER, wigr, 
Permission.NONE); failGetPermissions(AUSER, null, new NullPointerException( "wslist cannot be null")); List<WorkspaceIdentifier> huge = new LinkedList<WorkspaceIdentifier>(); for (int i = 1; i <= 1002; i++) { huge.add(new WorkspaceIdentifier(i)); } failGetPermissions(AUSER, huge, new IllegalArgumentException( "Maximum number of workspaces allowed for input is 1000")); ws.setWorkspaceDeleted(AUSER, wiow, true); failGetPermissions(AUSER, wsis, new NoSuchWorkspaceException( String.format("Workspace %s is deleted", wiow.getName()), wiow)); ws.setWorkspaceDeleted(AUSER, wiow, false); wsis.add(new WorkspaceIdentifier("permmass-doesntexist")); failGetPermissions(AUSER, wsis, new NoSuchWorkspaceException( "No workspace with name permmass-doesntexist exists", wiow)); wsis.add(new WorkspaceIdentifier(100000000)); failGetPermissions(AUSER, wsis, new NoSuchWorkspaceException( "No workspace with id 100000000 exists", wiow)); } @Test public void permissions() throws Exception { //setup WorkspaceIdentifier wsiNG = new WorkspaceIdentifier("perms_noglobal"); ws.createWorkspace(AUSER, "perms_noglobal", false, null, null); WorkspaceIdentifier wsiGL = new WorkspaceIdentifier("perms_global"); ws.createWorkspace(AUSER, "perms_global", true, "globaldesc", null); Map<User, Permission> expect = new HashMap<User, Permission>(); //try some illegal ops try { ws.getWorkspaceDescription(null, wsiNG); fail("Able to get private workspace description with no user name"); } catch (Exception e) { assertThat("Correct exception message", e.getLocalizedMessage(), is("Anonymous users may not read workspace perms_noglobal")); } try { ws.getWorkspaceInformation(null, wsiNG); fail("Able to get private workspace metadata with no user name"); } catch (WorkspaceAuthorizationException e) { assertThat("Correct exception message", e.getLocalizedMessage(), is("Anonymous users may not read workspace perms_noglobal")); } failSetPermissions(null, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException( "Anonymous users may not set permissions on workspace perms_noglobal")); failSetPermissions(null, wsiNG, null, Permission.READ, new IllegalArgumentException("The users list may not be null or empty")); failSetPermissions(null, wsiNG, new LinkedList<WorkspaceUser>(), Permission.READ, new IllegalArgumentException("The users list may not be null or empty")); failSetPermissions(AUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.OWNER, new IllegalArgumentException("Cannot set owner permission")); failSetPermissions(BUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException("User b may not set permissions on workspace perms_noglobal")); //check basic permissions for new private and public workspaces expect.put(AUSER, Permission.OWNER); assertThat("ws has correct perms for owner", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.put(STARUSER, Permission.READ); assertThat("ws has correct perms for owner", ws.getPermissions( AUSER, Arrays.asList(wsiGL)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.NONE); assertThat("ws has correct perms for random user", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.put(STARUSER, Permission.READ); assertThat("ws has correct perms for random user", ws.getPermissions( BUSER, Arrays.asList(wsiGL)).get(0), is(expect)); //test read permissions assertThat("can read public workspace description", ws.getWorkspaceDescription(null, wsiGL), is("globaldesc")); 
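/* anonymous users can read the globally readable workspace, but the reported effective permission is still NONE */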
WorkspaceInformation info = ws.getWorkspaceInformation(null, wsiGL); checkWSInfo(info, AUSER, "perms_global", 0, Permission.NONE, true, "unlocked", MT_META); ws.setPermissions(AUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ); expect.clear(); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.READ); expect.put(CUSER, Permission.READ); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("no permission leakage", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); failSetPermissions(BUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException( "User b may not alter other user's permissions on workspace perms_noglobal")); failSetPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.WRITE, new WorkspaceAuthorizationException( "User b may only reduce their permission level on workspace perms_noglobal")); //asAdmin testing ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.ADMIN, true); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.ADMIN); expect.put(CUSER, Permission.READ); assertThat("asAdmin boolean works", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.READ); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("reduce own permissions", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(null, wsiNG, Arrays.asList(BUSER), Permission.ADMIN, true); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.ADMIN); expect.put(CUSER, Permission.READ); assertThat("asAdmin boolean works with null user",ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(AUSER, wsiNG, Arrays.asList(BUSER), Permission.READ); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("reduced permissions", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.READ); //should have no effect expect.clear(); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.READ); expect.put(CUSER, Permission.READ); assertThat("user setting same perms has no effect", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.READ); assertThat("setting own perms to same has no effect", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(BUSER), Permission.NONE); expect.clear(); expect.put(AUSER, Permission.OWNER); expect.put(CUSER, Permission.READ); assertThat("user removed own perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(BUSER, Permission.NONE); assertThat("can remove own perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); //test write permissions ws.setPermissions(AUSER, wsiNG, Arrays.asList(BUSER), Permission.WRITE); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.WRITE); expect.put(CUSER, Permission.READ); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("write perms allow viewing all perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); expect.clear(); expect.put(CUSER, Permission.READ); assertThat("no 
permission leakage", ws.getPermissions( CUSER, Arrays.asList(wsiNG)).get(0), is(expect)); failSetPermissions(BUSER, wsiNG, Arrays.asList(AUSER, BUSER, CUSER), Permission.READ, new WorkspaceAuthorizationException( "User b may not alter other user's permissions on workspace perms_noglobal")); //test admin permissions ws.setPermissions(AUSER, wsiNG, Arrays.asList(BUSER), Permission.ADMIN); expect.put(AUSER, Permission.OWNER); expect.put(BUSER, Permission.ADMIN); expect.put(CUSER, Permission.READ); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("admin can see all perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setPermissions(BUSER, wsiNG, Arrays.asList(AUSER, CUSER), Permission.WRITE); expect.put(CUSER, Permission.WRITE); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("admin can correctly set perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); //test remove permissions ws.setPermissions(BUSER, wsiNG, Arrays.asList(AUSER, CUSER), Permission.NONE); expect.remove(CUSER); assertThat("ws doesn't replace owner perms", ws.getPermissions( AUSER, Arrays.asList(wsiNG)).get(0), is(expect)); assertThat("admin can't overwrite owner perms", ws.getPermissions( BUSER, Arrays.asList(wsiNG)).get(0), is(expect)); ws.setGlobalPermission(AUSER, new WorkspaceIdentifier("perms_global"), Permission.NONE); } @Test public void permissionsWithNoUser() throws Exception { /* Tests the case that no user credentials are supplied and thus the * user is null. Only globally readable workspaces should return * permissions other than NONE. */ WorkspaceIdentifier wsiNG = new WorkspaceIdentifier("PnoU_noglobal"); ws.createWorkspace(AUSER, "PnoU_noglobal", false, null, null); WorkspaceIdentifier wsiGL = new WorkspaceIdentifier("PnoU_global"); ws.createWorkspace(AUSER, "PnoU_global", true, "globaldesc", null); Map<User, Permission> expect = new HashMap<User, Permission>(); assertThat("No permissions for private WS", ws.getPermissions(null, Arrays.asList(wsiNG)).get(0), is(expect)); expect.put(STARUSER, Permission.READ); assertThat("Read permissions for public WS", ws.getPermissions(null, Arrays.asList(wsiGL)).get(0), is(expect)); ws.setGlobalPermission(AUSER, wsiGL, Permission.NONE); } @Test public void saveObjectsAndGetMetaSimple() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceUser bar = new WorkspaceUser("bar"); IdReferenceHandlerSetFactory foofac = getIdFactory(); IdReferenceHandlerSetFactory barfac = getIdFactory(); WorkspaceIdentifier read = new WorkspaceIdentifier("saveobjread"); WorkspaceIdentifier priv = new WorkspaceIdentifier("saveobj"); WorkspaceInformation readinfo = ws.createWorkspace( foo, read.getIdentifierString(), true, null, null); WorkspaceInformation privinfo = ws.createWorkspace( foo, priv.getIdentifierString(), false, null, null); Date readLastDate = readinfo.getModDate(); Date privLastDate = privinfo.getModDate(); long readid = readinfo.getId(); long privid = privinfo.getId(); Map<String, Object> data = new HashMap<String, Object>(); Map<String, Object> data2 = new HashMap<String, Object>(); Map<String, String> premeta = new HashMap<String, String>(); Map<String, Object> moredata = new HashMap<String, Object>(); moredata.put("foo", "bar"); data.put("fubar", moredata); JsonNode savedata = MAPPER.valueToTree(data); data2.put("fubar2", moredata); JsonNode savedata2 = 
				MAPPER.valueToTree(data2);
		premeta.put("metastuff", "meta");
		WorkspaceUserMetadata meta = new WorkspaceUserMetadata(premeta);
		Map<String, String> premeta2 = new HashMap<String, String>();
		premeta2.put("meta2", "my hovercraft is full of eels");
		WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(premeta2);
		Provenance p = new Provenance(new WorkspaceUser("kbasetest2"));
		p.addAction(new Provenance.ProvenanceAction().withServiceName("some service"));
		List<WorkspaceSaveObject> objects = new ArrayList<WorkspaceSaveObject>();
		try {
			ws.saveObjects(foo, read, objects, foofac);
			fail("Saved no objects");
		} catch (IllegalArgumentException e) {
			assertThat("correct except", e.getLocalizedMessage(), is("No data provided"));
		}
		failGetObjects(foo, new ArrayList<ObjectIdentifier>(), new IllegalArgumentException(
				"No object identifiers provided"));
		try {
			ws.getObjectInformation(foo, new ArrayList<ObjectIdentifier>(), true, false);
			fail("called method with no identifiers");
		} catch (IllegalArgumentException e) {
			assertThat("correct except", e.getLocalizedMessage(),
					is("No object identifiers provided"));
		}
		objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto3"), savedata, SAFE_TYPE1, meta, p, false));
		objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto3"), savedata2, SAFE_TYPE1, meta2, p, false));
		objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto3-1"), savedata, SAFE_TYPE1, meta, p, false));
		objects.add(new WorkspaceSaveObject(savedata2, SAFE_TYPE1, meta2, p, false));
		objects.add(new WorkspaceSaveObject(savedata, SAFE_TYPE1, meta, p, false));
		readLastDate = ws.getWorkspaceInformation(foo, read).getModDate();
		List<ObjectInformation> objinfo = ws.saveObjects(foo, read, objects, foofac);
		readLastDate = assertWorkspaceDateUpdated(foo, read, readLastDate, "ws date modified on save");
		String chksum1 = "36c4f68f2c98971b9736839232eb08f4";
		String chksum2 = "3c59f762140806c36ab48a152f28e840";
		checkObjInfo(objinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		checkObjInfo(objinfo.get(1), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo.get(2), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		checkObjInfo(objinfo.get(3), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo.get(4), 4, "auto4", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		List<ObjectIdentifier> loi = new ArrayList<ObjectIdentifier>();
		loi.add(new ObjectIdentifier(read, 1));
		loi.add(new ObjectIdentifier(read, 1, 1));
		loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3"));
		loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3", 1));
		loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1));
		loi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1, 1));
		loi.add(new ObjectIdentifier(read, "auto3"));
		loi.add(new ObjectIdentifier(read, "auto3", 1));
		loi.add(new ObjectIdentifier(read, "auto3-2"));
		loi.add(new ObjectIdentifier(read, 3));
		loi.add(new ObjectIdentifier(read, "auto3-2", 1));
		loi.add(new ObjectIdentifier(read, 3, 1));
		List<ObjectInformation> objinfo2 = ws.getObjectInformation(foo, loi, true, false);
		List<ObjectInformation> objinfo2NoMeta = ws.getObjectInformation(foo, loi, false, false);
		checkObjInfo(objinfo2.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(),
				chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(1), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		checkObjInfo(objinfo2.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(3), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		checkObjInfo(objinfo2.get(4), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(5), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		checkObjInfo(objinfo2.get(6), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(7), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		checkObjInfo(objinfo2.get(8), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(9), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(10), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2.get(11), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, premeta2);
		checkObjInfo(objinfo2NoMeta.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(1), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
		checkObjInfo(objinfo2NoMeta.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(3), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
		checkObjInfo(objinfo2NoMeta.get(4), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(5), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
		checkObjInfo(objinfo2NoMeta.get(6), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(7), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
		checkObjInfo(objinfo2NoMeta.get(8), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(9), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(10), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
		checkObjInfo(objinfo2NoMeta.get(11), 3, "auto3-2", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum2, 24, null);
		List<FakeObjectInfo> retinfo = new ArrayList<FakeObjectInfo>();
		FakeResolvedWSID fakews = new FakeResolvedWSID(read.getName(), readid);
		UncheckedUserMetadata umeta = new UncheckedUserMetadata(meta);
		UncheckedUserMetadata umeta2 = new UncheckedUserMetadata(meta2);
		retinfo.add(new FakeObjectInfo(1L, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24L, umeta2));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(),
				2, foo, fakews, chksum2, 24, umeta2));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24, umeta2));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 2, foo, fakews, chksum2, 24, umeta2));
		retinfo.add(new FakeObjectInfo(1, "auto3", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum1, 23, umeta));
		retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
		retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
		retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
		retinfo.add(new FakeObjectInfo(3, "auto3-2", SAFE_TYPE1.getTypeString(), new Date(), 1, foo, fakews, chksum2, 24, umeta2));
		List<Map<String, Object>> retdata = Arrays.asList(
				data2, data, data2, data, data2, data, data2, data, data2, data2, data2, data2);
		checkObjectAndInfo(foo, loi, retinfo, retdata);
		privLastDate = ws.getWorkspaceInformation(foo, priv).getModDate();
		ws.saveObjects(foo, priv, objects, foofac);
		privLastDate = assertWorkspaceDateUpdated(foo, priv, privLastDate, "ws date modified on save");
		objects.clear();
		objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer(2), savedata, SAFE_TYPE1, meta2, p, false));
		objinfo = ws.saveObjects(foo, read, objects, foofac);
		ws.saveObjects(foo, priv, objects, foofac);
		checkObjInfo(objinfo.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum1, 23, premeta2);
		objinfo2 = ws.getObjectInformation(foo, Arrays.asList(new ObjectIdentifier(read, 2)), true, false);
		checkObjInfo(objinfo2.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum1, 23, premeta2);
		ws.getObjectInformation(bar, Arrays.asList(new ObjectIdentifier(read, 2)), true, false); //should work
		try {
			ws.getObjectInformation(bar, Arrays.asList(new ObjectIdentifier(priv, 2)), true, false);
			fail("Able to get obj meta from private workspace");
		} catch (InaccessibleObjectException ioe) {
			assertThat("correct exception message", ioe.getLocalizedMessage(),
					is("Object 2 cannot be accessed: User bar may not read workspace saveobj"));
			assertThat("correct object returned", ioe.getInaccessibleObject(),
					is(new ObjectIdentifier(priv, 2)));
		}
		successGetObjects(bar, Arrays.asList(new ObjectIdentifier(read, 2)));
		try {
			ws.getObjects(bar, Arrays.asList(new ObjectIdentifier(priv, 2)));
			fail("Able to get obj data from private workspace");
		} catch (InaccessibleObjectException ioe) {
			assertThat("correct exception message", ioe.getLocalizedMessage(),
					is("Object 2 cannot be accessed: User bar may not read workspace saveobj"));
			assertThat("correct object returned", ioe.getInaccessibleObject(),
					is(new ObjectIdentifier(priv, 2)));
		}
		ws.setPermissions(foo, priv, Arrays.asList(bar), Permission.READ);
		objinfo2 = ws.getObjectInformation(bar, Arrays.asList(new ObjectIdentifier(priv, 2)), true, false);
		checkObjInfo(objinfo2.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 2, foo, privid, priv.getName(), chksum1, 23, premeta2);
		checkObjectAndInfo(bar, Arrays.asList(new ObjectIdentifier(priv, 2)),
				Arrays.asList(new FakeObjectInfo(2L, "auto3-1",
						SAFE_TYPE1.getTypeString(), new Date(), 2, foo,
						new FakeResolvedWSID(priv.getName(), privid), chksum1, 23L, umeta2)),
				Arrays.asList(data));
		failSave(bar, priv, objects, new WorkspaceAuthorizationException(
				"User bar may not write to workspace saveobj"));
		ws.setPermissions(foo, priv, Arrays.asList(bar), Permission.WRITE);
		objinfo = ws.saveObjects(bar, priv, objects, barfac);
		checkObjInfo(objinfo.get(0), 2, "auto3-1", SAFE_TYPE1.getTypeString(), 3, bar, privid, priv.getName(), chksum1, 23, premeta2);
		failGetObjects(foo, Arrays.asList(new ObjectIdentifier(read, "booger")),
				new NoSuchObjectException("No object with name booger exists in workspace " + readid));
		failGetObjects(foo, Arrays.asList(new ObjectIdentifier(new WorkspaceIdentifier("saveAndGetFakefake"), "booger")),
				new InaccessibleObjectException("Object booger cannot be accessed: No workspace with name saveAndGetFakefake exists"));
		ws.setPermissions(foo, priv, Arrays.asList(bar), Permission.NONE);
		failGetObjects(bar, Arrays.asList(new ObjectIdentifier(priv, 3)),
				new InaccessibleObjectException("Object 3 cannot be accessed: User bar may not read workspace saveobj"));
		failGetObjects(null, Arrays.asList(new ObjectIdentifier(priv, 3)),
				new InaccessibleObjectException("Object 3 cannot be accessed: Anonymous users may not read workspace saveobj"));
		//test get object info where null is returned instead of exception
		List<ObjectIdentifier> nullloi = new ArrayList<ObjectIdentifier>();
		nullloi.add(new ObjectIdentifier(read, 1));
		nullloi.add(new ObjectIdentifier(read, "booger"));
		nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier("saveAndGetFakefake"), "booger"));
		nullloi.add(new ObjectIdentifier(read, 1, 1));
		List<ObjectInformation> nullobjinfo = ws.getObjectInformation(foo, nullloi, true, true);
		checkObjInfo(nullobjinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(2));
		checkObjInfo(nullobjinfo.get(3), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		nullloi.clear();
		nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3"));
		nullloi.add(new ObjectIdentifier(priv, 2));
		nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), "auto3", 1));
		nullloi.add(new ObjectIdentifier(priv, 3));
		nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1));
		nullobjinfo = ws.getObjectInformation(bar, nullloi, false, true);
		checkObjInfo(nullobjinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
		checkObjInfo(nullobjinfo.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, null);
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(3));
		checkObjInfo(nullobjinfo.get(4), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, null);
		nullloi.clear();
		nullloi.add(new ObjectIdentifier(new WorkspaceIdentifier(readid), 1, 1));
		nullloi.add(new ObjectIdentifier(priv, 3));
		nullloi.add(new ObjectIdentifier(read, "auto3"));
		nullobjinfo = ws.getObjectInformation(null, nullloi, true, true);
		checkObjInfo(nullobjinfo.get(0), 1, "auto3", SAFE_TYPE1.getTypeString(), 1, foo, readid, read.getName(), chksum1, 23, premeta);
		assertNull("Obj info is null for inaccessible object",
				nullobjinfo.get(1));
		checkObjInfo(nullobjinfo.get(2), 1, "auto3", SAFE_TYPE1.getTypeString(), 2, foo, readid, read.getName(), chksum2, 24, premeta2);
		ws.setObjectsDeleted(foo, Arrays.asList(new ObjectIdentifier(priv, 3)), true);
		ws.setWorkspaceDeleted(foo, read, true);
		nullobjinfo = ws.getObjectInformation(null, nullloi, true, true);
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(0));
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(1));
		assertNull("Obj info is null for inaccessible object", nullobjinfo.get(2));
		ws.setWorkspaceDeleted(foo, read, false);
		ws.setGlobalPermission(foo, read, Permission.NONE);
	}

	@Test
	public void saveObjectsAndTestExtractedMeta() throws Exception {
		String module = "TestMetaData";
		String spec =
				"module " + module + " {" +
					"/* @metadata ws val \n@metadata ws length(l) as Length of list*/" +
					"typedef structure { string val; list<int> l; } MyType;" +
				"};";
		WorkspaceUser userfoo = new WorkspaceUser("foo");
		ws.requestModuleRegistration(userfoo, module);
		ws.resolveModuleRegistration(module, true);
		ws.compileNewTypeSpec(userfoo, spec, Arrays.asList("MyType"), null, null, false, null);
		TypeDefId MyType = new TypeDefId(new TypeDefName(module, "MyType"), 0, 1);
		WorkspaceIdentifier wspace = new WorkspaceIdentifier("metadatatest");
		ws.createWorkspace(userfoo, wspace.getName(), false, null, null);
		Provenance emptyprov = new Provenance(userfoo);

		// save an object and get back object info
		Map<String, Object> d1 = new LinkedHashMap<String, Object>();
		String val = "i should be a metadata";
		d1.put("val", val);
		d1.put("l", Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8));
		Map<String, String> metadata = new HashMap<String, String>();
		ws.saveObjects(userfoo, wspace, Arrays.asList(new WorkspaceSaveObject(
				new ObjectIDNoWSNoVer("d1"), d1, MyType,
				new WorkspaceUserMetadata(metadata), emptyprov, false)),
				getIdFactory());
		List<ObjectInformation> oi = ws.getObjectInformation(userfoo,
				Arrays.asList(new ObjectIdentifier(wspace, "d1")), true, true);
		Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi);
		Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi.get(0));

		// check that automatic metadata fields were populated correctly, and nothing else was added
		Map<String, String> savedUserMetaData = new HashMap<String, String>(
				oi.get(0).getUserMetaData().getMetadata());
		for (Entry<String, String> m : savedUserMetaData.entrySet()) {
			if (m.getKey().equals("val"))
				Assert.assertTrue("Extracted metadata must be correct", m.getValue().equals(val));
			if (m.getKey().equals("Length of list"))
				Assert.assertTrue("Extracted metadata must be correct", m.getValue().equals("8"));
		}
		savedUserMetaData.remove("val");
		savedUserMetaData.remove("Length of list");
		Assert.assertEquals("Only metadata we wanted was extracted", 0, savedUserMetaData.size());

		// now we do the same thing, but make sure 1) metadata set was added, and 2) metadata is overridden
		// by the extracted metadata
		metadata.put("Length of list", "i am pretty sure it was 7");
		metadata.put("my_special_metadata", "yes");
		ws.saveObjects(userfoo, wspace, Arrays.asList(new WorkspaceSaveObject(
				new ObjectIDNoWSNoVer("d2"), d1, MyType,
				new WorkspaceUserMetadata(metadata), emptyprov, false)),
				getIdFactory());
		List<ObjectInformation> oi2 = ws.getObjectInformation(userfoo,
				Arrays.asList(new ObjectIdentifier(wspace, "d2")), true, true);
		Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi2);
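		// The checks below verify that extracted values win: the user-supplied
		// "Length of list" is replaced by the value computed from length(l).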
Assert.assertNotNull("Getting back an object that was saved with automatic metadata extraction", oi2.get(0)); savedUserMetaData = new HashMap<String, String>( oi2.get(0).getUserMetaData().getMetadata()); for(Entry<String,String> m : savedUserMetaData.entrySet()) { if(m.getKey().equals("val")) assertThat("Extracted metadata must be correct", m.getValue(), is(val)); if(m.getKey().equals("Length of list")) assertThat("Extracted metadata must be correct", m.getValue(), is("8")); if(m.getKey().equals("my_special_metadata")) assertThat("Extracted metadata must be correct", m.getValue(), is("yes")); } savedUserMetaData.remove("val"); savedUserMetaData.remove("Length of list"); savedUserMetaData.remove("my_special_metadata"); Assert.assertEquals("Only metadata we wanted was extracted", 0, savedUserMetaData.size()); } @Test public void metadataExtractedLargeTest() throws Exception { String module = "TestLargeMetadata"; String typeName = "BigMeta"; String spec = "module " + module + " {" + "/* @metadata ws val\n" + "@metadata ws length(l) as Length of list*/" + "typedef structure {" + "string val;" + "list<int> l;" + "} " + typeName + ";" + "};"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, module); ws.resolveModuleRegistration(module, true); ws.compileNewTypeSpec(user, spec, Arrays.asList(typeName), null, null, false, null); TypeDefId type = new TypeDefId( new TypeDefName(module, typeName), 0, 1); Provenance mtprov = new Provenance(user); WorkspaceIdentifier wsi = new WorkspaceIdentifier( "metadataExtractedLargeTest"); ws.createWorkspace(user, wsi.getName(), false, null, null); // test fail when extracted metadata > limit Map<String, Object> dBig = new LinkedHashMap<String, Object>(); dBig.put("l", Arrays.asList(1,2,3,4,5,6,7,8)); StringBuilder bigVal = new StringBuilder(); for (int i = 0; i < 18; i++) { bigVal.append(LONG_TEXT); //> 16kb now } dBig.put("val", bigVal.toString()); failSave(user, wsi, "bigextractedmeta", dBig, type, mtprov, new IllegalArgumentException( "Object #1, bigextractedmeta: Extracted metadata from object exceeds limit of 16000B")); } @Test public void encodings() throws Exception { WorkspaceUser user = new WorkspaceUser("encodings"); WorkspaceIdentifier wspace = new WorkspaceIdentifier("encodings"); ws.createWorkspace(user, wspace.getName(), false, null, null); Provenance emptyprov = new Provenance(user); StringBuffer sb = new StringBuffer(); sb.appendCodePoint(0x1F082); sb.append("a"); sb.appendCodePoint(0x1F0C6); sb.append("b"); sb.appendCodePoint(0x23824); sb.append("c"); sb.appendCodePoint(0x1685); sb.append("d"); sb.appendCodePoint(0x13B2); sb.append("e"); sb.appendCodePoint(0x06E9); String s = sb.toString() + sb.toString(); Map<String, Object> craycraymap = new HashMap<String, Object>(); craycraymap.put(s + "42", Arrays.asList(s, s + "woot", s)); craycraymap.put(s + "6", s); craycraymap.put(s + "3012", 1); String jsondata = MAPPER.writeValueAsString(craycraymap); List<Charset> csets = Arrays.asList(Charset.forName("UTF-8"), Charset.forName("UTF-16LE"), Charset.forName("UTF-16BE"), Charset.forName("UTF-32LE"), Charset.forName("UTF-32BE")); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); for (Charset cs: csets) { objs.add(new WorkspaceSaveObject(new JsonTokenStream(jsondata.getBytes(cs)), SAFE_TYPE1, null, emptyprov, false)); } ws.saveObjects(user, wspace, objs, getIdFactory()); List<WorkspaceObjectData> ret = ws.getObjects(user, Arrays.asList( new ObjectIdentifier(wspace, 1), new ObjectIdentifier(wspace, 2), 
				new ObjectIdentifier(wspace, 3),
				new ObjectIdentifier(wspace, 4),
				new ObjectIdentifier(wspace, 5)));
		for (WorkspaceObjectData wod : ret) {
			assertThat("got correct object input in various encodings",
					wod.getData(), is((Object) craycraymap));
		}
	}

	@Test
	public void saveNonStructuralObjects() throws Exception {
		String module = "TestNonStruct";
		String spec =
				"module " + module + " {" +
					"typedef string type1;" +
					"typedef list<string> type2;" +
					"typedef mapping<string, string> type3;" +
					"typedef tuple<string, string> type4;" +
					"typedef structure { string val; } type5;" +
				"};";
		WorkspaceUser userfoo = new WorkspaceUser("foo");
		ws.requestModuleRegistration(userfoo, module);
		ws.resolveModuleRegistration(module, true);
		ws.compileNewTypeSpec(userfoo, spec, Arrays.asList(
				"type1", "type2", "type3", "type4", "type5"), null, null, false, null);
		TypeDefId abstype1 = new TypeDefId(new TypeDefName(module, "type1"), 0, 1);
		TypeDefId abstype2 = new TypeDefId(new TypeDefName(module, "type2"), 0, 1);
		TypeDefId abstype3 = new TypeDefId(new TypeDefName(module, "type3"), 0, 1);
		TypeDefId abstype4 = new TypeDefId(new TypeDefName(module, "type4"), 0, 1);
		TypeDefId abstype5 = new TypeDefId(new TypeDefName(module, "type5"), 0, 1);
		WorkspaceIdentifier wspace = new WorkspaceIdentifier("nonstruct");
		ws.createWorkspace(userfoo, wspace.getName(), false, null, null);
		Provenance emptyprov = new Provenance(userfoo);
		Map<String, String> data3 = new HashMap<String, String>();
		data3.put("val", "2");
		try {
			ws.saveObjects(userfoo, wspace, Arrays.asList(
					new WorkspaceSaveObject("data1", abstype1, null, emptyprov, false)),
					getIdFactory());
			Assert.fail("Method works but shouldn't");
		} catch (TypedObjectValidationException ex) {
			Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure"));
		}
		try {
			ws.saveObjects(userfoo, wspace, Arrays.asList(
					new WorkspaceSaveObject(Arrays.asList("data2"), abstype2, null, emptyprov, false)),
					getIdFactory());
			Assert.fail("Method works but shouldn't");
		} catch (TypedObjectValidationException ex) {
			Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure"));
		}
		try {
			ws.saveObjects(userfoo, wspace, Arrays.asList(
					new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)),
					getIdFactory());
			Assert.fail("Method works but shouldn't");
		} catch (TypedObjectValidationException ex) {
			Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure"));
		}
		try {
			ws.saveObjects(userfoo, wspace, Arrays.asList(
					new WorkspaceSaveObject(Arrays.asList("data4", "data4"), abstype4, null, emptyprov, false)),
					getIdFactory());
			Assert.fail("Method works but shouldn't");
		} catch (TypedObjectValidationException ex) {
			Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("structure"));
		}
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data3, abstype5, null, emptyprov, false)),
				getIdFactory());
	}

	@SuppressWarnings("unchecked")
	@Test
	public void saveNulls() throws Exception {
		String module = "TestNull";
		String spec =
				"module " + module + " {" +
					"typedef structure { " +
						" string val1; " +
						" int val2; " +
						" float val3; " +
					"} type1; " +
					"typedef structure { " +
						" list<string> val; " +
					"} type2;" +
					"typedef structure { " +
						" mapping<string,string> val; " +
					"} type3;" +
					"typedef structure { " +
						" tuple<string,string> val; " +
					"} type4;" +
					"typedef structure { " +
						" list<int> val; " +
					"} type5;" +
					"typedef structure { " +
						" list<float> val; " +
					"} type6;" +
				"};";
		WorkspaceUser userfoo = new WorkspaceUser("foo");
		ws.requestModuleRegistration(userfoo, module);
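		// Registration flow used throughout these tests: request the module,
		// approve the request, then compile the spec so the types exist.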
		ws.resolveModuleRegistration(module, true);
		ws.compileNewTypeSpec(userfoo, spec, Arrays.asList(
				"type1", "type2", "type3", "type4", "type5", "type6"), null, null, false, null);
		WorkspaceIdentifier wspace = new WorkspaceIdentifier("nulls");
		ws.createWorkspace(userfoo, wspace.getName(), false, null, null);
		Provenance emptyprov = new Provenance(userfoo);
		TypeDefId abstype1 = new TypeDefId(new TypeDefName(module, "type1"), 0, 1);
		TypeDefId abstype2 = new TypeDefId(new TypeDefName(module, "type2"), 0, 1);
		TypeDefId abstype3 = new TypeDefId(new TypeDefName(module, "type3"), 0, 1);
		TypeDefId abstype4 = new TypeDefId(new TypeDefName(module, "type4"), 0, 1);
		TypeDefId abstype5 = new TypeDefId(new TypeDefName(module, "type5"), 0, 1);
		TypeDefId abstype6 = new TypeDefId(new TypeDefName(module, "type6"), 0, 1);
		Set<String> keys = new TreeSet<String>(Arrays.asList("val1", "val2", "val3"));
		//TODO should try these tests with bytes vs. maps
		Map<String, Object> data1 = new LinkedHashMap<String, Object>();
		data1.put("val3", null);
		data1.put("val2", null);
		data1.put("val1", null);
		Assert.assertEquals(keys, new TreeSet<String>(data1.keySet()));
		Assert.assertTrue(data1.containsKey("val1"));
		Assert.assertNull(data1.get("val1"));
		long data1id = ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)),
				getIdFactory()).get(0).getObjectId();
		Map<String, Object> data1copy = (Map<String, Object>) ws.getObjects(userfoo, Arrays.asList(
				new ObjectIdentifier(wspace, data1id))).get(0).getData();
		Assert.assertEquals(keys, new TreeSet<String>(data1copy.keySet()));
		Map<String, Object> data2 = new LinkedHashMap<String, Object>();
		data2.put("val", null);
		failSave(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data2, abstype2, null, emptyprov, false)),
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (null) does not match any allowed primitive type (allowed: [\"array\"]), at /val"));
		data2.put("val", Arrays.asList((String) null));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data2, abstype2, null, emptyprov, false)),
				getIdFactory());
		Map<String, Object> data3 = new LinkedHashMap<String, Object>();
		data3.put("val", null);
		failSave(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)),
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (null) does not match any allowed primitive type (allowed: [\"object\"]), at /val"));
		Map<String, Object> innerMap = new LinkedHashMap<String, Object>();
		innerMap.put("key", null);
		data3.put("val", innerMap);
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)),
				getIdFactory());
		innerMap.put(null, "foo");
		failSave(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data3, abstype3, null, emptyprov, false)),
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nKeys in maps/structures may not be null"));
		Map<String, Object> data4 = new LinkedHashMap<String, Object>();
		data4.put("val", null);
		failSave(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data4, abstype4, null, emptyprov, false)),
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (null) does not match any allowed primitive type (allowed: [\"array\"]), at /val"));
		data4.put("val", Arrays.asList((String) null, (String) null));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data4, abstype4, null,
						emptyprov, false)),
				getIdFactory());
		Map<String, Object> data5 = new LinkedHashMap<String, Object>();
		data5.put("val", Arrays.asList(2, (Integer) null, 1));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data5, abstype5, null, emptyprov, false)),
				getIdFactory());
		Map<String, Object> data6 = new LinkedHashMap<String, Object>();
		data6.put("val", Arrays.asList(1.2, (Float) null, 3.6));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data6, abstype6, null, emptyprov, false)),
				getIdFactory());
	}

	@Test
	public void saveEmptyStringKey() throws Exception {
		WorkspaceUser user = new WorkspaceUser("foo");
		WorkspaceIdentifier wspace = new WorkspaceIdentifier("saveEmptyStringKey");
		ws.createWorkspace(user, wspace.getName(), false, null, null);
		Provenance mtprov = new Provenance(user);
		Map<String, Object> data = new HashMap<String, Object>();
		data.put("", 3); //should work
		ws.saveObjects(user, wspace, Arrays.asList(
				new WorkspaceSaveObject(data, SAFE_TYPE1, null, mtprov, false)),
				getIdFactory());
		@SuppressWarnings("unchecked")
		Map<String, Object> dataObj = (Map<String, Object>) ws.getObjects(user, Arrays.asList(
				new ObjectIdentifier(wspace, 1))).get(0).getData();
		assertThat("data saved correctly", dataObj, is(data));
	}

	@Test
	public void saveObjectWithTypeChecking() throws Exception {
		final String specTypeCheck1 =
				"module TestTypeChecking {" +
					"/* @id ws */" +
					"typedef string reference;" +
					"typedef string some_id2;" +
					"/* @optional ref */ " +
					"typedef structure {" +
						"int foo;" +
						"list<int> bar;" +
						"string baz;" +
						"reference ref;" +
					"} CheckType;" +
				"};";
		final String specTypeCheck2 =
				"module TestTypeChecking {" +
					"/* @id ws */" +
					"typedef string reference;" +
					"/* @optional ref\n" +
					" @optional map */" +
					"typedef structure {" +
						"int foo;" +
						"list<int> bar;" +
						"int baz;" +
						"reference ref;" +
						"mapping<string, string> map;" +
					"} CheckType;" +
				"};";
		final String specTypeCheckRefs =
				"module TestTypeCheckingRefType {" +
					"/* @id ws TestTypeChecking.CheckType */" +
					"typedef string reference;" +
					"/* @optional refmap */" +
					"typedef structure {" +
						"int foo;" +
						"list<int> bar;" +
						"string baz;" +
						"reference ref;" +
						"mapping<reference, string> refmap;" +
					"} CheckRefType;" +
				"};";
		String mod = "TestTypeChecking";
		WorkspaceUser userfoo = new WorkspaceUser("foo");
		ws.requestModuleRegistration(userfoo, mod);
		ws.resolveModuleRegistration(mod, true);
		ws.compileNewTypeSpec(userfoo, specTypeCheck1, Arrays.asList("CheckType"), null, null, false, null);
		TypeDefId abstype0 = new TypeDefId(new TypeDefName(mod, "CheckType"), 0, 1);
		TypeDefId abstype1 = new TypeDefId(new TypeDefName(mod, "CheckType"), 1, 0);
		TypeDefId abstype2 = new TypeDefId(new TypeDefName(mod, "CheckType"), 2, 0);
		TypeDefId relmintype0 = new TypeDefId(new TypeDefName(mod, "CheckType"), 0);
		TypeDefId relmintype1 = new TypeDefId(new TypeDefName(mod, "CheckType"), 1);
		TypeDefId relmintype2 = new TypeDefId(new TypeDefName(mod, "CheckType"), 2);
		TypeDefId relmaxtype = new TypeDefId(new TypeDefName(mod, "CheckType"));

		// test basic type checking with different versions
		WorkspaceIdentifier wspace = new WorkspaceIdentifier("typecheck");
		ws.createWorkspace(userfoo, wspace.getName(), false, null, null);
		Provenance emptyprov = new Provenance(userfoo);
		Map<String, Object> data1 = new HashMap<String, Object>();
		data1.put("foo", 3);
		data1.put("baz", "astring");
		data1.put("bar", Arrays.asList(-3, 1, 234567890));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)),
				getIdFactory()); //should work
		failSave(userfoo, wspace, data1, new TypeDefId("NoModHere.Foo"), emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nModule doesn't exist: NoModHere"));
		failSave(userfoo, wspace, data1, new TypeDefId("SomeModule.Foo"), emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nUnable to locate type: SomeModule.Foo"));
		failSave(userfoo, wspace, data1, relmintype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information"));
		failSave(userfoo, wspace, data1, relmintype1, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nUnable to locate type: TestTypeChecking.CheckType-1"));
		failSave(userfoo, wspace, data1, abstype1, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nUnable to locate type: TestTypeChecking.CheckType-1.0"));
		failSave(userfoo, wspace, data1, relmaxtype, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information"));
		ws.releaseTypes(userfoo, mod);
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, relmaxtype, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, data1, relmintype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information"));
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, relmintype1, null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, data1, relmintype2, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nUnable to locate type: TestTypeChecking.CheckType-2"));
		ws.compileNewTypeSpec(userfoo, specTypeCheck2, null, null, null, false, null);
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, relmaxtype, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, relmintype1, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, data1, abstype2, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz"));
		failSave(userfoo, wspace, data1, relmintype2, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information"));
		Map<String, Object> newdata = new HashMap<String, Object>(data1);
		newdata.put("baz", 1);
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(newdata, abstype2,
						null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, newdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		failSave(userfoo, wspace, newdata, abstype1, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		failSave(userfoo, wspace, newdata, relmaxtype, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		failSave(userfoo, wspace, newdata, relmintype1, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		failSave(userfoo, wspace, newdata, relmintype2, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\nThis type wasn't released yet and you should be an owner to access unreleased version information"));
		ws.releaseTypes(userfoo, mod);
		failSave(userfoo, wspace, data1, relmaxtype, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz"));
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, relmintype1, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(data1, abstype1, null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, data1, abstype2, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz"));
		failSave(userfoo, wspace, data1, relmintype2, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /baz"));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(newdata, abstype2, null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, newdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		failSave(userfoo, wspace, newdata, abstype1, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(newdata, relmaxtype, null, emptyprov, false)),
				getIdFactory());
		failSave(userfoo, wspace, newdata, relmintype1, emptyprov,
				new TypedObjectValidationException(
						"Object #1 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /baz"));
		ws.saveObjects(userfoo, wspace, Arrays.asList( //should work
				new WorkspaceSaveObject(newdata, relmintype2, null, emptyprov, false)),
				getIdFactory());

		// test non-parseable references and typechecking with object count
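		// Each failure message below names the 1-based index of the bad object
		// ("Object #2"), which is what "object count" refers to here.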
		List<WorkspaceSaveObject> data = new ArrayList<WorkspaceSaveObject>();
		data.add(new WorkspaceSaveObject(data1, abstype0, null, emptyprov, false));
		Map<String, Object> data2 = new HashMap<String, Object>(data1);
		data2.put("bar", Arrays.asList(-3, 1, "anotherstring"));
		data.add(new WorkspaceSaveObject(data2, abstype0, null, emptyprov, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 failed type checking:\ninstance type (string) does not match any allowed primitive type (allowed: [\"integer\"]), at /bar/2"));
		data.set(1, new WorkspaceSaveObject(data2, abstype2, null, emptyprov, false));
		@SuppressWarnings("unchecked")
		List<Integer> intlist = (List<Integer>) data2.get("bar");
		intlist.set(2, 42);
		Map<String, Object> inner = new HashMap<String, Object>();
		inner.put("amapkey", 42);
		data2.put("map", inner);
		data2.put("baz", 1);
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 failed type checking:\ninstance type (integer) does not match any allowed primitive type (allowed: [\"string\"]), at /map/amapkey"));
		Map<String, Object> data3 = new HashMap<String, Object>(data1);
		data3.put("ref", "typecheck/1/1");
		data.set(1, new WorkspaceSaveObject(data3, abstype0, null, emptyprov, false));
		ws.saveObjects(userfoo, wspace, data, getIdFactory()); //should work
		Map<String, Object> data4 = new HashMap<String, Object>(data1);
		data4.put("ref", "foo/bar/baz");
		data.set(1, new WorkspaceSaveObject(data4, abstype0, null, emptyprov, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 has unparseable reference foo/bar/baz: Unable to parse version portion of object reference foo/bar/baz to an integer at /ref"));
		Map<String, Object> data5 = new HashMap<String, Object>(data1);
		data5.put("ref", null);
		data.set(1, new WorkspaceSaveObject(data5, abstype0, null, emptyprov, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 failed type checking:\ninstance type (null) not allowed for ID reference (allowed: [\"string\"]), at /ref"));
		Map<String, Object> data6 = new HashMap<String, Object>(data1);
		data6.put("ref", "");
		data.set(1, new WorkspaceSaveObject(data6, abstype0, null, emptyprov, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 failed type checking:\nUnparseable id of type ws: IDs may not be null or the empty string at /ref"));
		Provenance goodids = new Provenance(userfoo);
		goodids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("typecheck/1/1")));
		data.set(1, new WorkspaceSaveObject(data3, abstype0, null, goodids, false));
		ws.saveObjects(userfoo, wspace, data, getIdFactory()); //should work
		Provenance badids = new Provenance(userfoo);
		badids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("foo/bar/baz")));
		data.set(1, new WorkspaceSaveObject(data3, abstype0, null, badids, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 has unparseable provenance reference foo/bar/baz: Unable to parse version portion of object reference foo/bar/baz to an integer"));
		badids = new Provenance(userfoo);
		badids.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList((String) null)));
		data.set(1, new WorkspaceSaveObject(data3, abstype0, null, badids, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 has a null provenance reference"));
		badids = new Provenance(userfoo);
		badids.addAction(new
				Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("")));
		data.set(1, new WorkspaceSaveObject(data3, abstype0, null, badids, false));
		failSave(userfoo, wspace, data, new TypedObjectValidationException(
				"Object #2 has invalid provenance reference: IDs may not be null or the empty string"));

		//test inaccessible references due to missing, deleted, or unreadable workspaces
		Map<String, Object> refdata = new HashMap<String, Object>(data1);
		refdata.put("ref", "thereisnoworkspaceofthisname/2/1");
		failSave(userfoo, wspace, refdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 has invalid reference: No read access to id thereisnoworkspaceofthisname/2/1: Object 2 cannot be accessed: No workspace with name thereisnoworkspaceofthisname exists at /ref"));
		Provenance nowsref = new Provenance(userfoo);
		nowsref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("thereisnoworkspaceofthisname/2/1")));
		failSave(userfoo, wspace, data1, abstype0, nowsref,
				new TypedObjectValidationException(
						"Object #1 has invalid provenance reference: No read access to id thereisnoworkspaceofthisname/2/1: Object 2 cannot be accessed: No workspace with name thereisnoworkspaceofthisname exists"));
		ws.createWorkspace(userfoo, "tobedeleted", false, null, null);
		ws.setWorkspaceDeleted(userfoo, new WorkspaceIdentifier("tobedeleted"), true);
		refdata.put("ref", "tobedeleted/2/1");
		failSave(userfoo, wspace, refdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 has invalid reference: No read access to id tobedeleted/2/1: Object 2 cannot be accessed: Workspace tobedeleted is deleted at /ref"));
		Provenance delwsref = new Provenance(userfoo);
		delwsref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("tobedeleted/2/1")));
		failSave(userfoo, wspace, data1, abstype0, delwsref,
				new TypedObjectValidationException(
						"Object #1 has invalid provenance reference: No read access to id tobedeleted/2/1: Object 2 cannot be accessed: Workspace tobedeleted is deleted"));
		ws.createWorkspace(new WorkspaceUser("stingyuser"), "stingyworkspace", false, null, null);
		refdata.put("ref", "stingyworkspace/2/1");
		failSave(userfoo, wspace, refdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 has invalid reference: No read access to id stingyworkspace/2/1: Object 2 cannot be accessed: User foo may not read workspace stingyworkspace at /ref"));
		Provenance privwsref = new Provenance(userfoo);
		privwsref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("stingyworkspace/2/1")));
		failSave(userfoo, wspace, data1, abstype0, privwsref,
				new TypedObjectValidationException(
						"Object #1 has invalid provenance reference: No read access to id stingyworkspace/2/1: Object 2 cannot be accessed: User foo may not read workspace stingyworkspace"));

		//test inaccessible reference due to missing or deleted objects, incl bad versions
		ws.createWorkspace(userfoo, "referencetesting", false, null, null);
		WorkspaceIdentifier reftest = new WorkspaceIdentifier("referencetesting");
		ws.saveObjects(userfoo, reftest, Arrays.asList(
				new WorkspaceSaveObject(newdata, abstype2, null, emptyprov, false)),
				getIdFactory());
		refdata.put("ref", "referencetesting/1/1");
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(refdata, abstype1, null, emptyprov, false)),
				getIdFactory());
		Provenance goodref = new Provenance(userfoo);
		goodref.addAction(new
				Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/1/1")));
		ws.saveObjects(userfoo, wspace, Arrays.asList(
				new WorkspaceSaveObject(refdata, abstype1, null, goodref, false)),
				getIdFactory());
		refdata.put("ref", "referencetesting/2/1");
		long refwsid = ws.getWorkspaceInformation(userfoo, reftest).getId();
		failSave(userfoo, wspace, refdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 has invalid reference: There is no object with id referencetesting/2/1: No object with id 2 exists in workspace " + refwsid + " at /ref"));
		Provenance noobjref = new Provenance(userfoo);
		noobjref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/2/1")));
		failSave(userfoo, wspace, data1, abstype0, noobjref,
				new TypedObjectValidationException(
						"Object #1 has invalid provenance reference: There is no object with id referencetesting/2/1: No object with id 2 exists in workspace " + refwsid));
		ws.saveObjects(userfoo, reftest, Arrays.asList(
				new WorkspaceSaveObject(newdata, abstype2, null, emptyprov, false)),
				getIdFactory());
		ws.setObjectsDeleted(userfoo, Arrays.asList(new ObjectIdentifier(reftest, 2)), true);
		failSave(userfoo, wspace, refdata, abstype0, emptyprov,
				new TypedObjectValidationException(String.format(
						"Object #1 has invalid reference: There is no object with id referencetesting/2/1: Object 2 (name auto2) in workspace %s has been deleted at /ref", refwsid)));
		Provenance delobjref = new Provenance(userfoo);
		delobjref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/2/1")));
		failSave(userfoo, wspace, data1, abstype0, delobjref,
				new TypedObjectValidationException(String.format(
						"Object #1 has invalid provenance reference: There is no object with id referencetesting/2/1: Object 2 (name auto2) in workspace %s has been deleted", refwsid)));
		refdata.put("ref", "referencetesting/1/2");
		failSave(userfoo, wspace, refdata, abstype0, emptyprov,
				new TypedObjectValidationException(
						"Object #1 has invalid reference: There is no object with id referencetesting/1/2: No object with id 1 (name auto1) and version 2 exists in workspace " + refwsid + " at /ref"));
		Provenance noverref = new Provenance(userfoo);
		noverref.addAction(new Provenance.ProvenanceAction().withWorkspaceObjects(Arrays.asList("referencetesting/1/2")));
		failSave(userfoo, wspace, data1, abstype0, noverref,
				new TypedObjectValidationException(
						"Object #1 has invalid provenance reference: There is no object with id referencetesting/1/2: No object with id 1 (name auto1) and version 2 exists in workspace " + refwsid));
		//TODO test references against garbage collected objects

		//test reference type checking
		String refmod = "TestTypeCheckingRefType";
		ws.requestModuleRegistration(userfoo, refmod);
		ws.resolveModuleRegistration(refmod, true);
		ws.compileNewTypeSpec(userfoo, specTypeCheckRefs, Arrays.asList("CheckRefType"), null, null, false, null);
		TypeDefId absreftype0 = new TypeDefId(new TypeDefName(refmod, "CheckRefType"), 0, 1);
		ws.createWorkspace(userfoo, "referencetypecheck", false, null, null);
		WorkspaceIdentifier reftypecheck = new WorkspaceIdentifier("referencetypecheck");
		long reftypewsid = ws.getWorkspaceInformation(userfoo, reftypecheck).getId();
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(newdata, SAFE_TYPE1, null, emptyprov, false)),
				getIdFactory());
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(newdata, abstype2, null, emptyprov, false)),
				getIdFactory());
		refdata.put("ref", "referencetypecheck/2/1");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", "referencetypecheck/2");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", "referencetypecheck/auto2/1");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", "referencetypecheck/auto2");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", reftypewsid + "/2/1");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", reftypewsid + "/2");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", reftypewsid + "/auto2/1");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		refdata.put("ref", reftypewsid + "/auto2");
		ws.saveObjects(userfoo, reftypecheck, Arrays.asList(
				new WorkspaceSaveObject(refdata, absreftype0, null, emptyprov, false)),
				getIdFactory()); //should work
		String err = "Object #1 has invalid reference: The type " +
				"SomeModule.AType-0.1 of reference %s in this object is not " +
				"allowed - allowed types are [TestTypeChecking.CheckType] at /ref";
		refdata.put("ref", "referencetypecheck/1/1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, "referencetypecheck/1/1")));
		refdata.put("ref", "referencetypecheck/1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, "referencetypecheck/1")));
		refdata.put("ref", "referencetypecheck/auto1/1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, "referencetypecheck/auto1/1")));
		refdata.put("ref", "referencetypecheck/auto1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, "referencetypecheck/auto1")));
		refdata.put("ref", reftypewsid + "/1/1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, reftypewsid + "/1/1")));
		refdata.put("ref", reftypewsid + "/1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, reftypewsid + "/1")));
		refdata.put("ref", reftypewsid + "/auto1/1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, reftypewsid + "/auto1/1")));
		refdata.put("ref", reftypewsid + "/auto1");
		failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov,
				new TypedObjectValidationException(String.format(err, reftypewsid + "/auto1")));

		//check references were rewritten correctly
		for (int i = 3; i < 11; i++) {
			WorkspaceObjectData wod = ws.getObjects(userfoo, Arrays.asList(
					new ObjectIdentifier(reftypecheck, i))).get(0);
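			// Fetch the same object via the subsetting path as well; both paths
			// should report the reference rewritten to the absolute wsid/2/1 form.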
			WorkspaceObjectData wodsub = ws.getObjectsSubSet(userfoo, Arrays.asList(
					new SubObjectIdentifier(new ObjectIdentifier(reftypecheck, i), null))).get(0);
			@SuppressWarnings("unchecked")
			Map<String, Object> obj = (Map<String, Object>) wod.getData();
			@SuppressWarnings("unchecked")
			Map<String, Object> subobj = (Map<String, Object>) wodsub.getData();
			assertThat("reference rewritten correctly", (String) obj.get("ref"),
					is(reftypewsid + "/2/1"));
			assertThat("reference included correctly", wod.getReferences(),
					is(Arrays.asList(reftypewsid + "/2/1")));
			assertThat("sub obj reference rewritten correctly", (String) subobj.get("ref"),
					is(reftypewsid + "/2/1"));
			assertThat("sub obj reference included correctly", wodsub.getReferences(),
					is(Arrays.asList(reftypewsid + "/2/1")));
			WorkspaceObjectInformation inf = ws.getObjectProvenance(userfoo, Arrays.asList(
					new ObjectIdentifier(reftypecheck, i))).get(0);
			assertThat("sub obj reference included correctly", inf.getReferences(),
					is(Arrays.asList(reftypewsid + "/2/1")));
		}
	}

	@Test
	public void wsIdErrorOrder() throws Exception {
		//test that an id error returns the right id if multiple IDs exist
		WorkspaceUser user = new WorkspaceUser("user1");
		WorkspaceIdentifier wsi = new WorkspaceIdentifier("wsIdErrorOrder");
		long wsid = ws.createWorkspace(user, wsi.getName(), false, null, null).getId();
		List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>();
		Map<String, Object> d = new HashMap<String, Object>();
		Provenance mtprov = new Provenance(user);
		objs.add(new WorkspaceSaveObject(d, SAFE_TYPE1, null, mtprov, false));
		ws.saveObjects(user, wsi, objs, new IdReferenceHandlerSetFactory(0));
		Provenance p = new Provenance(user).addAction(new ProvenanceAction()
				.withWorkspaceObjects(Arrays.asList(
						wsi.getName() + "/auto1", wsi.getName() + "/auto2")));
		objs.set(0, new WorkspaceSaveObject(d, SAFE_TYPE1, null, p, false));
		failSave(user, wsi, objs, new TypedObjectValidationException(
				"Object #1 has invalid provenance reference: There is no object with id wsIdErrorOrder/auto2: No object with name auto2 exists in workspace " + wsid));
	}

	@Test
	public void duplicateAutoIds() throws Exception {
		WorkspaceUser user = new WorkspaceUser("user1");
		WorkspaceIdentifier wsi = new WorkspaceIdentifier("dupAutoIds");
		ws.createWorkspace(user, wsi.getName(), false, null, null);
		List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>();
		Map<String, Object> d1 = new HashMap<String, Object>();
		Map<String, Object> d2 = new HashMap<String, Object>();
		d2.put("d", 2);
		Provenance mtprov = new Provenance(user);
		objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto5-foo"), d1, SAFE_TYPE1, null, mtprov, false));
		objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto5-1-1"), d1, SAFE_TYPE1, null, mtprov, false));
		objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto5"), d1, SAFE_TYPE1, null, mtprov, false));
		objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto5-1"), d1, SAFE_TYPE1, null, mtprov, false));
		objs.add(new WorkspaceSaveObject(d2, SAFE_TYPE1, null, mtprov, false));
		ws.saveObjects(user, wsi, objs, new IdReferenceHandlerSetFactory(0));
		WorkspaceObjectData d = ws.getObjects(user, Arrays.asList(
				new ObjectIdentifier(wsi, "auto5-2"))).get(0);
		assertThat("auto named correctly", d.getData(), is((Object) d2));
	}

	@Test
	public void genericIdExtraction() throws Exception {
		String idtype1 = "someid";
		String idtype2 = "someid2";
//		String idtypeint = "someintid";
		String mod = "TestIDExtraction";
		String type = "IdType";
		final String idSpec = "module " + mod
+ " {\n" + "/* @id " + idtype1 + " */\n" + "typedef string some_id;\n" + "/* @id " + idtype2 + " */\n" + "typedef string some_id2;\n" + // "/* @id " + idtypeint + " */" + // "typedef int int_id;" + "/* @optional an_id\n" + " @optional an_id2\n" + // " @optional an_int_id */" + "*/" + "typedef structure {\n" + "some_id an_id;\n" + "some_id2 an_id2;\n" + // "int_id an_int_id;" + "} " + type + ";\n" + "};\n"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(user, idSpec, Arrays.asList(type), null, null, false, null); TypeDefId idtype = new TypeDefId(new TypeDefName(mod, type), 0, 1); // test basic type checking with different versions WorkspaceIdentifier wsi = new WorkspaceIdentifier("idextract"); ws.createWorkspace(user, wsi.getName(), false, null, null); Provenance emptyprov = new Provenance(user); List<WorkspaceSaveObject> data = new LinkedList<WorkspaceSaveObject>(); data.add(new WorkspaceSaveObject(new HashMap<String, Object>(), idtype, null, emptyprov, false)); Map<String, Object> iddata = new HashMap<String, Object>(); IdReferenceHandlerSetFactory fac = getIdFactory().addFactory( new TestIDReferenceHandlerFactory(new IdReferenceType(idtype1))); data.add(new WorkspaceSaveObject(iddata, idtype, null, emptyprov, false)); iddata.put("an_id", "id here"); iddata.put("an_id2", "foo"); // iddata.put("an_int_id", 34); ws.saveObjects(user, wsi, data, fac); //should work Map<String, List<String>> expected = new HashMap<String, List<String>>(); ObjectIdentifier obj1 = new ObjectIdentifier(wsi, "auto1"); checkExternalIds(user, obj1, expected); expected.put(idtype1, Arrays.asList("id here")); ObjectIdentifier obj2 = new ObjectIdentifier(wsi, "auto2"); checkExternalIds(user, obj2, expected); fac.addFactory(new TestIDReferenceHandlerFactory(new IdReferenceType(idtype2))); ws.saveObjects(user, wsi, data, fac); //should work expected.put(idtype2, Arrays.asList("foo")); ObjectIdentifier obj4 = new ObjectIdentifier(wsi, "auto4"); checkExternalIds(user, obj4, expected); ObjectIdentifier copied = new ObjectIdentifier(wsi, "copied"); ws.copyObject(user, obj4, copied); checkExternalIds(user, copied, expected); WorkspaceIdentifier clone = new WorkspaceIdentifier("idextract_cloned"); ws.cloneWorkspace(user, wsi, clone.getName(), false, null, null); ObjectIdentifier clonedobj = new ObjectIdentifier(clone, "copied"); checkExternalIds(user, clonedobj, expected); ws.saveObjects(user, wsi, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("copied"), new HashMap<String, Object>(), idtype, null, emptyprov, false)), fac); ws.revertObject(user, new ObjectIdentifier(wsi, "copied", 1)); checkExternalIds(user, new ObjectIdentifier(wsi, "copied", 3), expected); expected.clear(); ws.revertObject(user, new ObjectIdentifier(wsi, "copied", 2)); checkExternalIds(user, new ObjectIdentifier(wsi, "copied", 4), expected); // //check int ids // fac.addFactory(new TestIDReferenceHandlerFactory(new IdReferenceType(idtypeint))); // // ws.saveObjects(user, wsi, data, fac); //should work // expected.put(idtype1, Arrays.asList("id here")); // expected.put(idtype2, Arrays.asList("foo")); // expected.put(idtypeint, Arrays.asList("34")); // checkExternalIds(user, new ObjectIdentifier(wsi, "auto7"), expected); // // iddata.put("an_int_id", null); // // failSave(user, wsi, data, fac, new TypedObjectValidationException( // "Object #2 failed type checking:\ninstance type (null) not allowed for ID reference (allowed: [\"integer\"]), at 
/an_int_id")); iddata.put("an_id", "parseExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 failed type checking:\nUnparseable id parseExcept of type someid: Parse exception for ID parseExcept at /an_id")); iddata.clear(); iddata.put("an_id2", "refExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 failed type checking:\nInvalid id refExcept of type someid2: Reference exception for ID refExcept at /an_id2")); iddata.clear(); iddata.put("an_id", "genExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 failed type checking:\nId handling error for id type someid: General exception for ID genExcept at /an_id")); iddata.put("an_id", "procParseExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 has unparseable reference procParseExcept: Process Parse exception for ID procParseExcept at /an_id")); iddata.clear(); iddata.put("an_id2", "procRefExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "Object #2 has invalid reference: Process Reference exception for ID procRefExcept at /an_id2")); iddata.clear(); iddata.put("an_id", "procGenExcept"); failSave(user, wsi, data, fac, new TypedObjectValidationException( "An error occured while processing IDs: Process General exception for ID procGenExcept")); } @Test public void wsIDHandling() throws Exception { String mod = "WsIDHandling"; String type = "IdType"; final String idSpec = "module " + mod + " {\n" + "/* @optional foo */\n" + "typedef structure {\n" + "int foo;\n" + "} Type1;\n" + "/* @optional foo */\n" + "typedef structure {\n" + "int foo;\n" + "} Type2;\n" + "/* @optional foo */\n" + "typedef structure {\n" + "int foo;\n" + "} Type3;\n" + "/* @id ws */\n" + "typedef string ws_any;\n" + "/* @id ws " + mod + ".Type1 */\n" + "typedef string ws_1;\n" + "/* @id ws " + mod + ".Type2 */\n" + "typedef string ws_2;\n" + "/* @id ws " + mod + ".Type3 */\n" + "typedef string ws_3;\n" + "/* @id ws " + mod + ".Type1 " + mod + ".Type2 */\n" + "typedef string ws_12;\n" + "/* @id ws " + mod + ".Type1 " + mod + ".Type3 */\n" + "typedef string ws_13;\n" + "/* @id ws " + mod + ".Type2 " + mod + ".Type3 */\n" + "typedef string ws_23;\n" + "/* @optional ws_any ws_1 ws_2 ws_3 ws_12 ws_13 ws_23 */\n" + "typedef structure {\n" + "list<ws_any> ws_any;\n" + "list<mapping<ws_1, int>> ws_1;\n" + "list<tuple<string, ws_2>> ws_2;\n" + "list<list<ws_3>> ws_3;\n" + "list<ws_12> ws_12;\n" + "list<ws_13> ws_13;\n" + "list<ws_23> ws_23;\n" + "} " + type + ";\n" + "};\n"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(user, idSpec, Arrays.asList(type, "Type1", "Type2", "Type3"), null, null, false, null); TypeDefId type1 = new TypeDefId(new TypeDefName(mod, "Type1"), 0, 1); TypeDefId type2 = new TypeDefId(new TypeDefName(mod, "Type2"), 0, 1); TypeDefId type3 = new TypeDefId(new TypeDefName(mod, "Type3"), 0, 1); TypeDefId idtype = new TypeDefId(new TypeDefName(mod, type), 0, 1); // test basic type checking with different versions WorkspaceIdentifier wsi = new WorkspaceIdentifier("wsIDHandling"); long wsid = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); Provenance emptyprov = new Provenance(user); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); IdReferenceHandlerSetFactory fac = new IdReferenceHandlerSetFactory(3); Map<String, Object> mt = new HashMap<String, Object>(); 
objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("t1"), mt, type1, null, emptyprov, false)); objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("t2"), mt, type2, null, emptyprov, false)); objs.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("t3"), mt, type3, null, emptyprov, false)); ws.saveObjects(user, wsi, objs, fac); String ref1 = wsi.getName() + "/t1"; String ref2 = wsi.getName() + "/t2"; String ref3 = wsi.getName() + "/t3"; List<String> all3 = Arrays.asList(ref1, ref2, ref3); Map<String, Object> data = new HashMap<String, Object>(); data.put("ws_any", all3); Map<String, Integer> innermap = new HashMap<String, Integer>(); data.put("ws_1", Arrays.asList(innermap)); innermap.put(ref1, 3); ArrayList<List<String>> innertuple = new ArrayList<List<String>>(); data.put("ws_2", innertuple); innertuple.add(Arrays.asList("foo", ref2)); ArrayList<String> innerlist = new ArrayList<String>(); data.put("ws_3", Arrays.asList(innerlist)); innerlist.add(ref3); data.put("ws_12", Arrays.asList(ref1, ref2)); data.put("ws_13", Arrays.asList(ref1, ref3)); data.put("ws_23", Arrays.asList(ref2, ref3)); objs.clear(); objs.add(new WorkspaceSaveObject(data, idtype, null, emptyprov, false)); //should work ws.saveObjects(user, wsi, objs, fac); innermap.put(ref2, 4); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type2-0.1 of reference wsIDHandling/t2 in this object is not allowed - allowed types are [WsIDHandling.Type1] at /ws_1/0/wsIDHandling/t2")); innermap.remove(ref2); innermap.put(ref3, 6); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type3-0.1 of reference wsIDHandling/t3 in this object is not allowed - allowed types are [WsIDHandling.Type1] at /ws_1/0/wsIDHandling/t3")); innermap.remove(ref3); innertuple.add(Arrays.asList("bar", ref1)); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type1-0.1 of reference wsIDHandling/t1 in this object is not allowed - allowed types are [WsIDHandling.Type2] at /ws_2/1/1")); innertuple.clear(); innertuple.add(Arrays.asList("baz", ref3)); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type3-0.1 of reference wsIDHandling/t3 in this object is not allowed - allowed types are [WsIDHandling.Type2] at /ws_2/0/1")); innertuple.set(0, Arrays.asList("foo", ref2)); innerlist.add(ref1); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type1-0.1 of reference wsIDHandling/t1 in this object is not allowed - allowed types are [WsIDHandling.Type3] at /ws_3/0/1")); innerlist.set(1, ref3); innerlist.add(ref2); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type2-0.1 of reference wsIDHandling/t2 in this object is not allowed - allowed types are [WsIDHandling.Type3] at /ws_3/0/2")); innerlist.remove(2); innerlist.remove(1); data.put("ws_12", all3); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type3-0.1 of reference wsIDHandling/t3 in this object is not allowed - allowed types are [WsIDHandling.Type1, WsIDHandling.Type2] at /ws_12/2")); data.put("ws_12", Arrays.asList(ref1, ref2)); data.put("ws_13", all3); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid 
reference: The type WsIDHandling.Type2-0.1 of reference wsIDHandling/t2 in this object is not allowed - allowed types are [WsIDHandling.Type1, WsIDHandling.Type3] at /ws_13/1")); data.put("ws_13", Arrays.asList(ref1, ref3)); data.put("ws_23", all3); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: The type WsIDHandling.Type1-0.1 of reference wsIDHandling/t1 in this object is not allowed - allowed types are [WsIDHandling.Type2, WsIDHandling.Type3] at /ws_23/0")); // test that the id path is returned on parse and inaccessible object exceptions data.put("ws_23", Arrays.asList(ref2, ref3)); innertuple.set(0, Arrays.asList("foo", "YourMotherWasAHamster")); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has unparseable reference YourMotherWasAHamster: Illegal number of separators / in object reference YourMotherWasAHamster at /ws_2/0/1")); innertuple.set(0, Arrays.asList("foo", ref2)); data.remove("ws_any"); ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, "t1")), true); failSave(user, wsi, objs, new TypedObjectValidationException( "Object #1 has invalid reference: There is no object with id wsIDHandling/t1: Object 1 (name t1) in workspace " + wsid + " has been deleted at /ws_12/0")); } @Test public void maxIdsPerCall() throws Exception { String idtype1 = "someid"; String idtype2 = "someid2"; String mod = "TestMaxId"; String listtype = "ListIdType"; final String idSpec = "module " + mod + " {\n" + "/* @id ws */\n" + "typedef string ws_id;\n" + "/* @id " + idtype1 + " */\n" + "typedef string some_id;\n" + "/* @id " + idtype2 + " */\n" + "typedef string some_id2;\n" + "/* @id " + idtype1 + " attrib1 */\n" + "typedef string some_id_a1;\n" + "/* @id " + idtype1 + " attrib2 */\n" + "typedef string some_id_a2;\n" + "/* @optional ws_ids\n" + " @optional some_ids\n" + " @optional some_ids2\n" + " @optional some_ids_a1\n" + " @optional some_ids_a2\n" + "*/\n" + "typedef structure {\n" + "list<ws_id> ws_ids;\n" + "list<some_id> some_ids;\n" + "list<some_id2> some_ids2;\n" + "list<some_id_a1> some_ids_a1;\n" + "list<some_id_a2> some_ids_a2;\n" + "} " + listtype + ";\n" + "};\n"; WorkspaceUser user = new WorkspaceUser("foo"); ws.requestModuleRegistration(user, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(user, idSpec, Arrays.asList(listtype), null, null, false, null); TypeDefId listidtype = new TypeDefId(new TypeDefName(mod, listtype), 0, 1); // set up a workspace for the max-IDs-per-call tests WorkspaceIdentifier wsi = new WorkspaceIdentifier("maxids"); ws.createWorkspace(user, wsi.getName(), false, null, null); Provenance emptyprov = new Provenance(user); List<WorkspaceSaveObject> objs = new LinkedList<WorkspaceSaveObject>(); WorkspaceSaveObject mtobj = new WorkspaceSaveObject( new HashMap<String, String>(), listidtype, null, emptyprov, false); objs.add(mtobj); objs.add(mtobj); IdReferenceHandlerSetFactory fac = makeFacForMaxIDTests( Arrays.asList(idtype1, idtype2), user, 8); ws.saveObjects(user, wsi, objs, fac); objs.clear(); Map<String, Object> data1 = new HashMap<String, Object>(); data1.put("ws_ids", Arrays.asList("maxids/auto1", "maxids/auto2", "maxids/auto1")); data1.put("some_ids", Arrays.asList("foo", "bar", "foo")); data1.put("some_ids2", Arrays.asList("foo", "baz", "foo")); data1.put("some_ids_a1", Arrays.asList("foo", "bak", "foo")); data1.put("some_ids_a2", Arrays.asList("foo", "baf", "foo")); objs.add(new WorkspaceSaveObject(data1, listidtype, null, emptyprov, false)); //should work 
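// The cap of 8 used above matches the number of unique IDs in data1, since
// duplicates do not appear to count against the limit: 2 workspace refs
// (auto1, auto2), 4 distinct someid values (foo, bar, bak, baf) and 2 distinct
// someid2 values (foo, baz). The next two calls probe both sides of that
// boundary.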
ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 7); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #1 - the number of unique IDs in the saved objects exceeds the maximum allowed, 7")); Provenance p = new Provenance(user).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList( "maxids/auto1", "maxids/auto2", "maxids/auto1"))); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 10); objs.set(0, new WorkspaceSaveObject(data1, listidtype, null, p, false)); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 9); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #1 - the number of unique IDs in the saved objects exceeds the maximum allowed, 9")); objs.set(0, new WorkspaceSaveObject(data1, listidtype, null, emptyprov, false)); objs.add(new WorkspaceSaveObject(data1, listidtype, null, emptyprov, false)); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 16); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 15); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #2 - the number of unique IDs in the saved objects exceeds the maximum allowed, 15")); objs.set(0, new WorkspaceSaveObject(data1, listidtype, null, p, false)); objs.set(1, new WorkspaceSaveObject(data1, listidtype, null, p, false)); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 20); //should work ws.saveObjects(user, wsi, objs, fac); fac = makeFacForMaxIDTests(Arrays.asList(idtype1, idtype2), user, 19); failSave(user, wsi, objs, fac, new TypedObjectValidationException( "Failed type checking at object #2 - the number of unique IDs in the saved objects exceeds the maximum allowed, 19")); } private IdReferenceHandlerSetFactory makeFacForMaxIDTests(List<String> idtypes, WorkspaceUser user, int max) { IdReferenceHandlerSetFactory fac = new IdReferenceHandlerSetFactory(max); // .addFactory(ws.getHandlerFactory(user)); for (String idtype: idtypes) { fac.addFactory(new TestIDReferenceHandlerFactory( new IdReferenceType(idtype))); } return fac; } @Test public void referenceClash() throws Exception { String mod = "TestTypeCheckingErr"; final String specTypeCheck1 = "module " + mod + " {" + "typedef structure {" + "int foo;" + "list<int> bar;" + "string baz;" + "} CheckType;" + "};"; WorkspaceUser userfoo = new WorkspaceUser("foo"); Provenance emptyprov = new Provenance(userfoo); ws.requestModuleRegistration(userfoo, mod); ws.resolveModuleRegistration(mod, true); ws.compileNewTypeSpec(userfoo, specTypeCheck1, Arrays.asList("CheckType"), null, null, false, null); ws.releaseTypes(userfoo, mod); TypeDefId abstype0 = new TypeDefId(new TypeDefName(mod, "CheckType"), 1, 0); String wsName = "reftypecheckerror"; ws.createWorkspace(userfoo, wsName, false, null, null); WorkspaceIdentifier reftypecheck = new WorkspaceIdentifier(wsName); Map<String, Object> refdata = new HashMap<String, Object>(); refdata.put("foo", 3); refdata.put("baz", "astring"); refdata.put("bar", Arrays.asList(-3, 1, 234567890)); ws.saveObjects(userfoo, reftypecheck, Arrays.asList( new WorkspaceSaveObject(refdata, abstype0 , null, emptyprov, false)), getIdFactory()); String refmod = "TestTypeCheckingRefTypeErr"; final String specTypeCheckRefs = "module " + refmod + " {" + "/* @id ws " + mod + 
".CheckType */" + "typedef string reference;" + "/* @optional refmap */" + "typedef structure {" + "int foo;" + "list<int> bar;" + "string baz;" + "reference ref;" + "mapping<reference, string> refmap;" + "} CheckRefType;" + "};"; ws.requestModuleRegistration(userfoo, refmod); ws.resolveModuleRegistration(refmod, true); ws.compileNewTypeSpec(userfoo, specTypeCheckRefs, Arrays.asList("CheckRefType"), null, null, false, null); ws.releaseTypes(userfoo, refmod); TypeDefId absreftype0 = new TypeDefId(new TypeDefName(refmod, "CheckRefType"), 1, 0); long reftypewsid = ws.getWorkspaceInformation(userfoo, reftypecheck).getId(); //test the edge case where two keys in a hash resolve to the same reference refdata.put("ref", wsName + "/1/1"); Map<String, String> refmap = new HashMap<String, String>(); refmap.put(wsName + "/1/1", "pootypoot"); refmap.put(wsName + "/auto1/1", "pootypoot"); assertThat("refmap has 2 refs", refmap.size(), is(2)); refdata.put("refmap", refmap); failSave(userfoo, reftypecheck, refdata, absreftype0, emptyprov, new TypedObjectValidationException( "Object #1: Two references in a single hash are identical when resolved, resulting in a loss of data: " + "Duplicated key '" + reftypewsid + "/1/1' was found at /refmap")); } @Test public void saveProvenance() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceIdentifier prov = new WorkspaceIdentifier("provenance"); ws.createWorkspace(foo, prov.getName(), false, null, null); long wsid = ws.getWorkspaceInformation(foo, prov).getId(); Map<String, Object> data = new HashMap<String, Object>(); data.put("foo", "bar"); Provenance emptyprov = new Provenance(foo); //already tested bad references in saveObjectWithTypeChecking, won't test again here ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory()); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory()); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory()); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto1"), data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory()); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("auto1"), data, SAFE_TYPE1, null, emptyprov, false)), getIdFactory()); List<ExternalData> ed = new LinkedList<ExternalData>(); ed.add(new ExternalData() .withDataId("data id") .withDataUrl("http://somedata.org/somedata") .withDescription("a description") .withResourceName("resource") .withResourceReleaseDate(new Date(62)) .withResourceUrl("http://somedata.org") .withResourceVersion("1.2.3") ); ed.add(new ExternalData().withDataId("data id2")); Provenance p = new Provenance(foo); p.addAction(new ProvenanceAction() .withCommandLine("A command line") .withDescription("descrip") .withIncomingArgs(Arrays.asList("a", "b", "c")) .withMethod("method") .withMethodParameters(Arrays.asList((Object) data, data, data)) .withOutgoingArgs(Arrays.asList("d", "e", "f")) .withScript("script") .withScriptVersion("2.1") .withServiceName("service") .withServiceVersion("3") .withTime(new Date(45)) .withExternalData(ed) .withWorkspaceObjects(Arrays.asList("provenance/auto3", "provenance/auto1/2"))); p.addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("provenance/auto2/1", "provenance/auto1"))); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, 
SAFE_TYPE1, null, p, false)), getIdFactory()); Map<String, String> refmap = new HashMap<String, String>(); refmap.put("provenance/auto3", wsid + "/3/1"); refmap.put("provenance/auto1/2", wsid + "/1/2"); refmap.put("provenance/auto2/1", wsid + "/2/1"); refmap.put("provenance/auto1", wsid + "/1/3"); checkProvenanceCorrect(foo, p, new ObjectIdentifier(prov, 4), refmap); try { new WorkspaceSaveObject(data, SAFE_TYPE1, null, null, false); fail("saved without provenance"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Neither data, provenance, nor type may be null")); } try { new WorkspaceSaveObject(new ObjectIDNoWSNoVer("foo"), SAFE_TYPE1, null, null, false); fail("saved without provenance"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Neither data, provenance, nor type may be null")); } try { new Provenance(null); fail("created provenance with null user"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("user cannot be null")); } try { Provenance pv = new Provenance(foo); pv.addAction(null); fail("added null action to provenance"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("action cannot be null")); } //Test minimal provenance Provenance p2 = new Provenance(foo); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, p2, false)), getIdFactory()); List<Date> dates = checkProvenanceCorrect(foo, p2, new ObjectIdentifier(prov, 5), new HashMap<String, String>()); Provenance got2 = ws.getObjects(foo, Arrays.asList(new ObjectIdentifier(prov, 5))).get(0).getProvenance(); assertThat("Prov date constant", got2.getDate(), is(dates.get(0))); Provenance gotsub2 = ws.getObjectsSubSet(foo, Arrays.asList(new SubObjectIdentifier( new ObjectIdentifier(prov, 5), null))).get(0).getProvenance(); assertThat("Prov date constant", gotsub2.getDate(), is(dates.get(1))); assertThat("Prov dates same", got2.getDate(), is(gotsub2.getDate())); Provenance gotProv2 = ws.getObjectProvenance(foo, Arrays.asList( new ObjectIdentifier(prov, 5))).get(0).getProvenance(); assertThat("Prov date constant", gotProv2.getDate(), is(dates.get(2))); assertThat("Prov dates same", got2.getDate(), is(gotProv2.getDate())); //make sure passing nulls for ws obj lists doesn't kill anything Provenance p3 = new Provenance(foo); p3.addAction(new ProvenanceAction().withWorkspaceObjects(null)); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, p3, false)), getIdFactory()); checkProvenanceCorrect(foo, p3, new ObjectIdentifier(prov, 6), new HashMap<String, String>()); Provenance p4 = new Provenance(foo); ProvenanceAction pa = new ProvenanceAction(); pa.setWorkspaceObjects(null); p4.addAction(pa); p3.addAction(new ProvenanceAction().withWorkspaceObjects(null)); ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, p4, false)), getIdFactory()); checkProvenanceCorrect(foo, p4, new ObjectIdentifier(prov, 7), new HashMap<String, String>()); } @Test public void saveLargeProvenance() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceIdentifier prov = new WorkspaceIdentifier("bigprov"); ws.createWorkspace(foo, prov.getName(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); data.put("foo", "bar"); List<Object> methparams = new ArrayList<Object>(); for (int i = 1; i < 997; i++) { methparams.add(TEXT1000); } Provenance p = new Provenance(foo); p.addAction(new 
ProvenanceAction().withMethodParameters(methparams)); ws.saveObjects(foo, prov, Arrays.asList( //should work new WorkspaceSaveObject(data, SAFE_TYPE1, null, p, false)), getIdFactory()); methparams.add(TEXT1000); Provenance p2 = new Provenance(foo); p2.addAction(new ProvenanceAction().withMethodParameters(methparams)); try { ws.saveObjects(foo, prov, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, p2, false)), getIdFactory()); fail("saved too big prov"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Object #1 provenance size 1000290 exceeds limit of 1000000")); } } //TODO BF this test belongs in the user metadata test /* @Test public void bigUserMetaErrors() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceIdentifier read = new WorkspaceIdentifier("bigmeta"); ws.createWorkspace(foo, read.getIdentifierString(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); Map<String, String> smallmeta = new HashMap<String, String>(); smallmeta.put("foo", "bar"); Map<String, String> meta = new HashMap<String, String>(); data.put("fubar", "bar"); JsonNode savedata = MAPPER.valueToTree(data); for (int i = 0; i < 18; i++) { meta.put(Integer.toString(i), LONG_TEXT); // now > 16KB } try { ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("bigmeta"), savedata, SAFE_TYPE1, meta, new Provenance(foo), false)), getIdFactory()); fail("saved object with > 16KB metadata"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Metadata size of 19413 is > 16000 bytes")); } try { ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer(3), savedata, SAFE_TYPE1, meta, new Provenance(foo), false)), getIdFactory()); fail("saved object with > 16KB metadata"); } catch (IllegalArgumentException iae) { assertThat("correct exception", iae.getLocalizedMessage(), is("Metadata size of 19413 is > 16000 bytes")); } }*/ @Test public void saveWithWrongObjectId() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceIdentifier read = new WorkspaceIdentifier("wrongobjid"); ws.createWorkspace(foo, read.getIdentifierString(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); JsonNode savedata = MAPPER.valueToTree(data); try { ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer(3), savedata, SAFE_TYPE1, null, new Provenance(foo), false)), getIdFactory()); fail("saved object with nonexistent id"); } catch (NoSuchObjectException nsoe) { assertThat("correct exception", nsoe.getLocalizedMessage(), is("There is no object with id 3")); } } @Test public void unserializableData() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceIdentifier read = new WorkspaceIdentifier("unserializable"); ws.createWorkspace(foo, read.getIdentifierString(), false, null, null); Object data = new StringReader("foo"); Map<String, String> meta = new HashMap<String, String>(); meta.put("foo", "bar"); try { ws.saveObjects(foo, read, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("jframe"), data, SAFE_TYPE1, new WorkspaceUserMetadata(meta), new Provenance(foo), false)), getIdFactory()); fail("saved unserializable object"); } catch (IllegalArgumentException iae) { assertThat("Actual exception: " + iae.getMessage(), iae.getMessage(), is("UObject can not serialize object of this type: java.io.StringReader")); } } @Test 
public void getNonexistantObjects() throws Exception { WorkspaceUser foo = new WorkspaceUser("foo"); WorkspaceIdentifier read = new WorkspaceIdentifier("nonexistantobjects"); ws.createWorkspace(foo, read.getIdentifierString(), false, null, null); long readid = ws.getWorkspaceInformation(foo, read).getId(); Map<String, Object> data = new HashMap<String, Object>(); data.put("fubar", "thingy"); JsonNode savedata = MAPPER.valueToTree(data); List<WorkspaceSaveObject> objects = new ArrayList<WorkspaceSaveObject>(); objects.add(new WorkspaceSaveObject(new ObjectIDNoWSNoVer("myname"), savedata, SAFE_TYPE1, null, new Provenance(foo), false)); ws.saveObjects(foo, read, objects, getIdFactory()); getNonExistantObject(foo, new ObjectIdentifier(read, 2), "No object with id 2 exists in workspace " + readid); getNonExistantObject(foo, new ObjectIdentifier(read, 1, 2), "No object with id 1 (name myname) and version 2 exists in workspace " + readid); getNonExistantObject(foo, new ObjectIdentifier(read, "myname2"), "No object with name myname2 exists in workspace " + readid); getNonExistantObject(foo, new ObjectIdentifier(read, "myname", 2), "No object with id 1 (name myname) and version 2 exists in workspace " + readid); } @Test public void objectIDs() throws Exception { WorkspaceIdentifier goodWs = new WorkspaceIdentifier("foo"); testObjectIdentifier("f|o.A-1_2"); testObjectIdentifier("f|o.A-1_2", 1); testObjectIdentifier(null, "foo", "wsi cannot be null"); testObjectIdentifier(goodWs, null, "Object name cannot be null or the empty string"); testObjectIdentifier(goodWs, "", "Object name cannot be null or the empty string"); testObjectIdentifier(goodWs, "f|o.A-1_2+", "Illegal character in object name f|o.A-1_2+: +"); testObjectIdentifier(goodWs, "-1", "Object names cannot be integers: -1"); testObjectIdentifier(goodWs, "15", "Object names cannot be integers: 15"); testObjectIdentifier(goodWs, "f|o.A-1_2", 0, "Object version must be > 0"); testObjectIdentifier(goodWs, TEXT256, "Object name exceeds the maximum length of 255"); testObjectIdentifier(1); testObjectIdentifier(1, 1); testObjectIdentifier(null, 1, "wsi cannot be null"); testObjectIdentifier(goodWs, 0, "Object id must be > 0"); testObjectIdentifier(goodWs, 0, 1, "Object id must be > 0"); testObjectIdentifier(goodWs, 1, 0, "Object version must be > 0"); testCreate(goodWs, "f|o.A-1_2", null); testCreate(goodWs, null, 1L); testCreate(null, "boo", null, "wsi cannot be null"); testCreate(goodWs, TEXT256, null, "Object name exceeds the maximum length of 255"); testCreate(goodWs, null, null, "Must provide one and only one of object name (was: null) or id (was: null)"); testCreate(goodWs, "boo", 1L, "Must provide one and only one of object name (was: boo) or id (was: 1)"); testCreate(goodWs, "-1", null, "Object names cannot be integers: -1"); testCreate(goodWs, "15", null, "Object names cannot be integers: 15"); testCreateVer(goodWs, "boo", null, 1); testCreateVer(goodWs, null, 1L, 1); testCreateVer(goodWs, "boo", null, null); testCreateVer(goodWs, null, 1L, null); testCreateVer(goodWs, "boo", null, 0, "Object version must be > 0"); testCreateVer(goodWs, TEXT256, null, 1, "Object name exceeds the maximum length of 255"); testCreateVer(goodWs, null, 1L, 0, "Object version must be > 0"); testRef("foo/bar"); testRef("foo/bar/1"); testRef("foo/bar/1/2", "Illegal number of separators / in object reference foo/bar/1/2"); testRef("foo/" + TEXT256 + "/1", "Object name exceeds the maximum length of 255"); testRef("foo/bar/n", "Unable to parse version portion of 
object reference foo/bar/n to an integer"); testRef("foo", "Illegal number of separators / in object reference foo"); testRef("1/2"); testRef("1/2/3"); testRef("1/2/3/4", "Illegal number of separators / in object reference 1/2/3/4"); testRef("1/2/n", "Unable to parse version portion of object reference 1/2/n to an integer"); testRef("1", "Illegal number of separators / in object reference 1"); testRef("foo/2"); testRef("2/foo"); testRef("foo/2/1"); testRef("2/foo/1"); } @Test public void deleteUndelete() throws Exception { WorkspaceUser user = new WorkspaceUser("deleteundelete"); WorkspaceIdentifier read = new WorkspaceIdentifier("deleteundelete"); WorkspaceInformation readinfo = ws.createWorkspace(user, read.getIdentifierString(), false, "descrip", null); long wsid = readinfo.getId(); Date lastReadDate = readinfo.getModDate(); Map<String, String> data1 = new HashMap<String, String>(); Map<String, String> data2 = new HashMap<String, String>(); data1.put("data", "1"); data2.put("data", "2"); WorkspaceSaveObject sobj1 = new WorkspaceSaveObject( new ObjectIDNoWSNoVer("obj"), data1, SAFE_TYPE1, null, new Provenance(user), false); ws.saveObjects(user, read, Arrays.asList(sobj1, new WorkspaceSaveObject(new ObjectIDNoWSNoVer("obj"), data2, SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); ObjectIdentifier o1 = new ObjectIdentifier(read, "obj", 1); ObjectIdentifier o2 = new ObjectIdentifier(read, "obj", 2); Map<ObjectIdentifier, Object> idToData = new HashMap<ObjectIdentifier, Object>(); idToData.put(o1, data1); idToData.put(o2, data2); List<ObjectIdentifier> objs = new ArrayList<ObjectIdentifier>(idToData.keySet()); checkNonDeletedObjs(user, idToData); List<ObjectIdentifier> obj1 = new ArrayList<ObjectIdentifier>(Arrays.asList(o1)); List<ObjectIdentifier> obj2 = new ArrayList<ObjectIdentifier>(Arrays.asList(o2)); try { ws.setObjectsDeleted(new WorkspaceUser("bar"), obj1, true); fail("deleted objects w/o auth"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: User bar may not delete objects from workspace deleteundelete")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(o1)); } try { ws.setObjectsDeleted(new WorkspaceUser("bar"), obj1, false); fail("undeleted objects w/o auth"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: User bar may not undelete objects from workspace deleteundelete")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(o1)); } lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj1, true); lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on delete"); String err = String.format("Object 1 (name obj) in workspace %s has been deleted", wsid); failToGetDeletedObjects(user, objs, err); failToGetDeletedObjects(user, obj1, err); failToGetDeletedObjects(user, obj2, err); try { ws.setObjectsDeleted(user, obj2, true); //should have no effect } catch (NoSuchObjectException nsoe) { assertThat("correct exception", nsoe.getLocalizedMessage(), is("Object 1 (name obj) in workspace " + wsid + " has been deleted")); } failToGetDeletedObjects(user, objs, err); failToGetDeletedObjects(user, obj1, err); failToGetDeletedObjects(user, obj2, err); lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj2, false); lastReadDate = 
assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on undelete"); checkNonDeletedObjs(user, idToData); lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj1, false);//should have no effect lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on undelete"); checkNonDeletedObjs(user, idToData); lastReadDate = ws.getWorkspaceInformation(user, read).getModDate(); ws.setObjectsDeleted(user, obj2, true); lastReadDate = assertWorkspaceDateUpdated(user, read, lastReadDate, "ws date updated on delete"); failToGetDeletedObjects(user, objs, err); failToGetDeletedObjects(user, obj1, err); failToGetDeletedObjects(user, obj2, err); //save should undelete ws.saveObjects(user, read, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("obj"), data1, SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); ObjectIdentifier o3 = new ObjectIdentifier(read, "obj", 3); idToData.put(o3, data1); objs = new ArrayList<ObjectIdentifier>(idToData.keySet()); checkNonDeletedObjs(user, idToData); assertThat("can get ws description", ws.getWorkspaceDescription(user, read), is("descrip")); checkWSInfo(ws.getWorkspaceInformation(user, read), user, "deleteundelete", 1, Permission.OWNER, false, "unlocked", MT_META); WorkspaceUser bar = new WorkspaceUser("bar"); ws.setPermissions(user, read, Arrays.asList(bar), Permission.ADMIN); Map<User, Permission> p = new HashMap<User, Permission>(); p.put(user, Permission.OWNER); p.put(bar, Permission.ADMIN); assertThat("can get perms", ws.getPermissions( user, Arrays.asList(read)).get(0), is(p)); try { ws.setWorkspaceDeleted(bar, read, true); fail("Non owner deleted workspace"); } catch (WorkspaceAuthorizationException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("User bar may not delete workspace deleteundelete")); } WorkspaceInformation read1 = ws.getWorkspaceInformation(user, read); ws.setWorkspaceDeleted(user, read, true); WorkspaceInformation read2 = ws.listWorkspaces(user, null, null, null, null, null, true, true, false).get(0); try { ws.getWorkspaceDescription(user, read); fail("got description from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.getWorkspaceInformation(user, read); fail("got meta from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.setPermissions(user, read, Arrays.asList(bar), Permission.NONE); fail("set perms on deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.getPermissions(user, Arrays.asList(read)); fail("got perms from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } failGetObjects(bar, objs, new InaccessibleObjectException( "Object obj cannot be accessed: Workspace deleteundelete is deleted")); try { ws.getObjectInformation(bar, objs, false, false); fail("got obj meta from deleted workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception msg", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: Workspace deleteundelete is deleted")); } try { ws.saveObjects(bar, read, Arrays.asList(sobj1), 
getIdFactory()); fail("saved objs from deleted workspace"); } catch (NoSuchWorkspaceException e) { assertThat("correct exception msg", e.getLocalizedMessage(), is("Workspace deleteundelete is deleted")); } try { ws.setObjectsDeleted(bar, obj1, true); } catch (InaccessibleObjectException ioe) { assertThat("correct exception msg", ioe.getLocalizedMessage(), is("Object obj cannot be accessed: Workspace deleteundelete is deleted")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(o1)); } ws.setWorkspaceDeleted(user, read, false); WorkspaceInformation read3 = ws.getWorkspaceInformation(user, read); checkNonDeletedObjs(user, idToData); assertThat("can get ws description", ws.getWorkspaceDescription(user, read), is("descrip")); checkWSInfo(ws.getWorkspaceInformation(user, read), user, "deleteundelete", 1, Permission.OWNER, false, "unlocked", MT_META); ws.setPermissions(user, read, Arrays.asList(bar), Permission.ADMIN); assertThat("can get perms", ws.getPermissions( user, Arrays.asList(read)).get(0), is(p)); assertTrue("date changed on delete", read1.getModDate().before(read2.getModDate())); assertTrue("date changed on undelete", read2.getModDate().before(read3.getModDate())); } @Test public void testTypeMd5s() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded String typeDefName = "SomeModule.AType"; Map<String,String> type2md5 = ws.translateToMd5Types(Arrays.asList(typeDefName + "-1.0"),null); Assert.assertEquals(1, type2md5.size()); String md5TypeDef = type2md5.get(typeDefName + "-1.0"); Assert.assertNotNull(md5TypeDef); Map<String, List<String>> md52semantic = ws.translateFromMd5Types(Arrays.asList(md5TypeDef)); Assert.assertEquals(1, md52semantic.size()); List<String> semList = md52semantic.get(md5TypeDef); Assert.assertNotNull(semList); Assert.assertEquals(2, semList.size()); for (String semText : semList) { TypeDefId semTypeDef = TypeDefId.fromTypeString(semText); Assert.assertEquals(typeDefName, semTypeDef.getType().getTypeString()); String verText = semTypeDef.getVerString(); Assert.assertTrue("0.1".equals(verText) || "1.0".equals(verText)); } } @Test public void testListModules() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded Map<String,String> moduleNamesInList = new HashMap<String,String>(); for(String mod:ws.listModules(null)) { moduleNamesInList.put(mod, ""); } Assert.assertTrue(moduleNamesInList.containsKey("SomeModule")); Assert.assertTrue(moduleNamesInList.containsKey("TestModule")); } @Test public void testListModuleVersions() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded Assert.assertEquals(3, ws.getModuleVersions("SomeModule", null).size()); Assert.assertEquals(4, ws.getModuleVersions("SomeModule", new WorkspaceUser("foo")).size()); Assert.assertEquals(2, ws.getModuleVersions("TestModule", null).size()); Assert.assertEquals(5, ws.getModuleVersions("TestModule", new WorkspaceUser("foo")).size()); } @Test public void testGetModuleInfo() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded ModuleInfo m = ws.getModuleInfo(null, new ModuleDefId("TestModule")); Assert.assertTrue(m.isReleased()); Map<String,String> funcNamesInList = new HashMap<String,String>(); for(String func : m.getFunctions() ){ funcNamesInList.put(func, ""); } Assert.assertTrue(funcNamesInList.containsKey("TestModule.getFeature-2.0")); Assert.assertTrue(funcNamesInList.containsKey("TestModule.getGenome-1.0")); Map<String,String> typeNamesInList = new 
HashMap<String,String>(); for(Entry<AbsoluteTypeDefId, String> type : m.getTypes().entrySet() ){ typeNamesInList.put(type.getKey().getTypeString(),""); } Assert.assertTrue(typeNamesInList.containsKey("TestModule.Genome-2.0")); Assert.assertTrue(typeNamesInList.containsKey("TestModule.Feature-1.0")); try { ws.getModuleInfo(null, new ModuleDefId("MadeUpModuleThatIsNotThere")); fail("getModuleInfo of a nonexistent module should throw a NoSuchModuleException"); } catch (NoSuchModuleException e) {} ModuleInfo m2 = ws.getModuleInfo(new WorkspaceUser("foo"), new ModuleDefId("UnreleasedModule")); Assert.assertEquals("foo", m2.getOwners().get(0)); Assert.assertFalse(m2.isReleased()); List<Long> verList = ws.getModuleVersions("UnreleasedModule", new WorkspaceUser("foo")); Assert.assertEquals(1, verList.size()); Assert.assertEquals(m2.getVersion(), verList.get(0)); } @Test public void testGetJsonSchema() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded try { ws.getJsonSchema(new TypeDefId("TestModule.NonExistantType"), null); fail("getJsonSchema of a nonexistent type should throw a NoSuchTypeException"); } catch (NoSuchTypeException e) {} // get several different schemas, make sure that no exceptions are thrown and it is valid json! String schema = ws.getJsonSchema(new TypeDefId(new TypeDefName("TestModule.Genome"),2,0), null); ObjectMapper mapper = new ObjectMapper(); JsonNode schemaNode = mapper.readTree(schema); Assert.assertEquals("Genome", schemaNode.get("id").asText()); schema = ws.getJsonSchema(new TypeDefId(new TypeDefName("TestModule.Genome"),2), null); schemaNode = mapper.readTree(schema); Assert.assertEquals("Genome", schemaNode.get("id").asText()); schema = ws.getJsonSchema(new TypeDefId("TestModule.Genome"), null); schemaNode = mapper.readTree(schema); Assert.assertEquals("Genome", schemaNode.get("id").asText()); } @Test public void testGetTypeInfo() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded TypeDetailedInfo info = ws.getTypeInfo("TestModule.Genome", false, null); Assert.assertEquals("TestModule.Genome-2.0",info.getTypeDefId()); Assert.assertEquals(1, info.getReleasedModuleVersions().size()); Assert.assertEquals(2, info.getReleasedTypeVersions().size()); info = ws.getTypeInfo("TestModule.Feature", false, null); Assert.assertEquals("TestModule.Feature-1.0",info.getTypeDefId()); Assert.assertEquals(2, info.getReleasedModuleVersions().size()); Assert.assertEquals(1, info.getReleasedTypeVersions().size()); TypeDetailedInfo info2 = ws.getTypeInfo("UnreleasedModule.AType-0.1", false, new WorkspaceUser("foo")); Assert.assertEquals(1, info2.getUsingFuncDefIds().size()); Assert.assertEquals(1, info2.getModuleVersions().size()); Assert.assertEquals(1, info2.getTypeVersions().size()); Assert.assertEquals(0, info2.getReleasedModuleVersions().size()); Assert.assertEquals(0, info2.getReleasedTypeVersions().size()); Assert.assertTrue(info2.getJsonSchema().contains("kidl-structure")); Assert.assertTrue(info2.getParsingStructure().contains("Bio::KBase::KIDL::KBT::Typedef")); } @Test public void testGetFuncInfo() throws Exception { //see setUpWorkspaces() to find where needed specs are loaded try { ws.getFuncInfo("NoModuleThatExists.getFeature", false, null); fail("getFuncInfo of a nonexistent module should throw a NoSuchModuleException"); } catch (NoSuchModuleException e) {} try { ws.getFuncInfo("TestModule.noFunctionThatIKnowOf", false, null); fail("getFuncInfo of a nonexistent function should throw a NoSuchFuncException"); } catch 
(NoSuchFuncException e) {} FuncDetailedInfo info = ws.getFuncInfo("TestModule.getFeature", false, null); Assert.assertEquals("TestModule.getFeature-2.0",info.getFuncDefId()); Assert.assertEquals(1, info.getReleasedModuleVersions().size()); Assert.assertEquals(2, info.getReleasedFuncVersions().size()); info = ws.getFuncInfo("TestModule.getGenome-1.0", false, null); Assert.assertEquals("TestModule.getGenome-1.0",info.getFuncDefId()); Assert.assertEquals(1, info.getReleasedModuleVersions().size()); Assert.assertEquals(1, info.getReleasedFuncVersions().size()); FuncDetailedInfo info2 = ws.getFuncInfo("UnreleasedModule.aFunc-0.1", false, new WorkspaceUser("foo")); Assert.assertEquals(1, info2.getUsedTypeDefIds().size()); Assert.assertEquals(1, info2.getModuleVersions().size()); Assert.assertEquals(1, info2.getFuncVersions().size()); Assert.assertEquals(0, info2.getReleasedModuleVersions().size()); Assert.assertEquals(0, info2.getReleasedFuncVersions().size()); Assert.assertTrue(info2.getParsingStructure().contains("Bio::KBase::KIDL::KBT::Funcdef")); } private void setUpCopyWorkspaces(WorkspaceUser user1, WorkspaceUser user2, String refws, String ws1, String ws2) throws Exception { TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); WorkspaceIdentifier refs = new WorkspaceIdentifier(refws); ws.createWorkspace(user1, refs.getName(), false, null, null); LinkedList<WorkspaceSaveObject> refobjs = new LinkedList<WorkspaceSaveObject>(); for (int i = 0; i < 4; i++) { refobjs.add(new WorkspaceSaveObject(new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user1), false)); } ws.saveObjects(user1, refs, refobjs, getIdFactory()); List<WorkspaceSaveObject> wso = Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("auto2"), new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user1), false)); ws.saveObjects(user1, refs, wso, getIdFactory()); ws.saveObjects(user1, refs, wso, getIdFactory()); Map<String, String> meta1 = makeSimpleMeta("foo", "bar"); Map<String, String> meta2 = makeSimpleMeta("foo", "baz"); Map<String, String> meta3 = makeSimpleMeta("foo", "bak"); Map<String, List<String>> data1 = makeRefData(refws + "/auto2/2"); Map<String, List<String>> data2 = makeRefData(refws + "/auto4"); Map<String, List<String>> data3 = makeRefData(refws + "/auto1"); Provenance prov1 = new Provenance(user1); prov1.addAction(new ProvenanceAction() .withCommandLine("A command line") .withDescription("descrip") .withIncomingArgs(Arrays.asList("a", "b", "c")) .withMethod("method") .withMethodParameters(Arrays.asList((Object) meta1)) .withOutgoingArgs(Arrays.asList("d", "e", "f")) .withScript("script") .withScriptVersion("2.1") .withServiceName("service") .withServiceVersion("3") .withTime(new Date(45)) .withWorkspaceObjects(Arrays.asList(refws + "/auto3", refws + "/auto2/2"))); prov1.addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList(refws + "/auto2/1", refws + "/auto1"))); Provenance prov2 = new Provenance(user1); Provenance prov3 = new Provenance(user1); prov2.addAction(new ProvenanceAction(prov1.getActions().get(0)).withServiceVersion("4") .withWorkspaceObjects(Arrays.asList(refws + "/auto2"))); prov3.addAction(new ProvenanceAction(prov1.getActions().get(0)).withServiceVersion("5") .withWorkspaceObjects(Arrays.asList(refws + "/auto3/1"))); WorkspaceIdentifier cp1 = new WorkspaceIdentifier(ws1); WorkspaceIdentifier cp2 = new WorkspaceIdentifier(ws2); ws.createWorkspace(user1, cp1.getName(), false, null, null).getId(); ws.createWorkspace(user2, 
cp2.getName(), false, null, null).getId(); saveObject(user1, cp1, meta1, data1, reftype, "hide", prov1, true); saveObject(user1, cp1, meta2, data2, reftype, "hide", prov2, true); saveObject(user1, cp1, meta3, data3, reftype, "hide", prov2, true); saveObject(user1, cp1, meta1, data1, reftype, "orig", prov1); saveObject(user1, cp1, meta2, data2, reftype, "orig", prov2); saveObject(user1, cp1, meta3, data3, reftype, "orig", prov3); saveObject(user1, cp1, meta1, data1, reftype, "hidetarget", prov1, true); } @Test public void copyRevert() throws Exception { WorkspaceUser user1 = new WorkspaceUser("foo"); WorkspaceUser user2 = new WorkspaceUser("bar"); String wsrefs = "copyrevertrefs"; String ws1 = "copyrevert1"; String ws2 = "copyrevert2"; setUpCopyWorkspaces(user1, user2, wsrefs, ws1, ws2); WorkspaceIdentifier cp1 = new WorkspaceIdentifier(ws1); WorkspaceIdentifier cp2 = new WorkspaceIdentifier(ws2); WorkspaceInformation cp1info = ws.getWorkspaceInformation(user1, cp1); WorkspaceInformation cp2info = ws.getWorkspaceInformation(user2, cp2); long wsid1 = cp1info.getId(); long wsid2 = cp2info.getId(); Date cp1LastDate = cp1info.getModDate(); Date cp2LastDate = cp2info.getModDate(); ObjectIdentifier oihide = new ObjectIdentifier(cp1, "hide"); List<ObjectInformation> objs = ws.getObjectHistory(user1, oihide); ObjectInformation save11 = objs.get(0); ObjectInformation save12 = objs.get(1); ObjectInformation save13 = objs.get(2); WorkspaceObjectData wod = ws.getObjects(user1, Arrays.asList(oihide)).get(0); WorkspaceObjectData swod = ws.getObjectsSubSet(user1, objIDToSubObjID(Arrays.asList(oihide))).get(0); WorkspaceObjectInformation woi = ws.getObjectProvenance(user1, Arrays.asList(oihide)).get(0); assertThat("copy ref for obj is null", wod.getCopyReference(), is((Reference) null)); assertThat("copy ref for sub obj is null", swod.getCopyReference(), is((Reference) null)); assertThat("copy ref for prov is null", woi.getCopyReference(), is((Reference) null)); //copy entire stack of hidden objects cp1LastDate = ws.getWorkspaceInformation(user1, cp1).getModDate(); ObjectInformation copied = ws.copyObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/hide"), ObjectIdentifier.parseObjectReference("copyrevert1/copyhide")); cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate, "ws date updated on copy"); compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 4, "copyhide", 3); List<ObjectInformation> copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, 4)); compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 4, "copyhide", 1); compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 4, "copyhide", 2); compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 4, "copyhide", 3); checkUnhiddenObjectCount(user1, cp1, 6, 10); objs = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "orig")); save11 = objs.get(0); save12 = objs.get(1); save13 = objs.get(2); //copy stack of unhidden objects copied = ws.copyObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/orig"), ObjectIdentifier.parseObjectReference("copyrevert1/copied")); cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate, "ws date updated on copy"); compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 5, "copied", 3); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied")); compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1); compareObjectAndInfo(save12, 
copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2); compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3); checkUnhiddenObjectCount(user1, cp1, 9, 13); //copy visible object to pre-existing hidden object copied = ws.copyObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/orig"), new ObjectIdentifier(cp1, "hidetarget")); cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate, "ws date updated on copy"); compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 3, "hidetarget", 2); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, 3)); //0 is original object compareObjectAndInfo(save13, copystack.get(1), user1, wsid1, cp1.getName(), 3, "hidetarget", 2); checkUnhiddenObjectCount(user1, cp1, 9, 14); //copy hidden object to pre-existing visible object //check that the to version is ignored copied = ws.copyObject(user1, new ObjectIdentifier(cp1, "orig"), new ObjectIdentifier(cp1, 5, 600)); compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 5, "copied", 4); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, 5)); compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4); checkUnhiddenObjectCount(user1, cp1, 10, 15); //copy specific version to existing object copied = ws.copyObject(user1, new ObjectIdentifier(new WorkspaceIdentifier(wsid1), 2, 2), ObjectIdentifier.parseObjectReference("copyrevert1/copied")); compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 5, "copied", 5); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied")); compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1); compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2); compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3); compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4); compareObjectAndInfo(save12, copystack.get(4), user1, wsid1, cp1.getName(), 5, "copied", 5); checkUnhiddenObjectCount(user1, cp1, 11, 16); //copy specific version to hidden existing object copied = ws.copyObject(user1, new ObjectIdentifier(new WorkspaceIdentifier(wsid1), 2, 2), ObjectIdentifier.parseObjectReference("copyrevert1/hidetarget")); compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 3, "hidetarget", 3); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "hidetarget")); //0 is original object compareObjectAndInfo(save13, copystack.get(1), user1, wsid1, cp1.getName(), 3, "hidetarget", 2); compareObjectAndInfo(save12, copystack.get(2), user1, wsid1, cp1.getName(), 3, "hidetarget", 3); checkUnhiddenObjectCount(user1, cp1, 11, 17); //copy specific version to new object copied = ws.copyObject(user1, new ObjectIdentifier(new WorkspaceIdentifier(wsid1), 2, 2), ObjectIdentifier.parseObjectReference("copyrevert1/newobj")); compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 6, "newobj", 1); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "newobj")); compareObjectAndInfo(save12, copystack.get(0), user1, wsid1, cp1.getName(), 6, "newobj", 1); checkUnhiddenObjectCount(user1, cp1, 12, 18); //revert normal object cp1LastDate = ws.getWorkspaceInformation(user1, cp1).getModDate(); copied = ws.revertObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/copied/2")); cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate, "ws date updated on 
revert"); compareObjectAndInfo(save12, copied, user1, wsid1, cp1.getName(), 5, "copied", 6); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied")); compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1); compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2); compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3); compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4); compareObjectAndInfo(save12, copystack.get(4), user1, wsid1, cp1.getName(), 5, "copied", 5); compareObjectAndInfo(save12, copystack.get(5), user1, wsid1, cp1.getName(), 5, "copied", 6); checkUnhiddenObjectCount(user1, cp1, 13, 19); //revert hidden object copied = ws.revertObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/hidetarget/2")); cp1LastDate = assertWorkspaceDateUpdated(user1, cp1, cp1LastDate, "ws date updated on revert"); compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 3, "hidetarget", 4); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "hidetarget")); //0 is original object compareObjectAndInfo(save13, copystack.get(1), user1, wsid1, cp1.getName(), 3, "hidetarget", 2); compareObjectAndInfo(save12, copystack.get(2), user1, wsid1, cp1.getName(), 3, "hidetarget", 3); compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 3, "hidetarget", 4); checkUnhiddenObjectCount(user1, cp1, 13, 20); //copy to new ws ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.WRITE); cp2LastDate = ws.getWorkspaceInformation(user1, cp2).getModDate(); copied = ws.copyObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/orig"), ObjectIdentifier.parseObjectReference("copyrevert2/copied")); cp2LastDate = assertWorkspaceDateUpdated(user1, cp2, cp2LastDate, "ws date updated on copy"); compareObjectAndInfo(save13, copied, user1, wsid2, cp2.getName(), 1, "copied", 3); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp2, "copied")); compareObjectAndInfo(save11, copystack.get(0), user1, wsid2, cp2.getName(), 1, "copied", 1); compareObjectAndInfo(save12, copystack.get(1), user1, wsid2, cp2.getName(), 1, "copied", 2); compareObjectAndInfo(save13, copystack.get(2), user1, wsid2, cp2.getName(), 1, "copied", 3); checkUnhiddenObjectCount(user1, cp2, 3, 3); checkUnhiddenObjectCount(user1, cp1, 13, 20); //copy to deleted object ws.setObjectsDeleted(user1, Arrays.asList( ObjectIdentifier.parseObjectReference("copyrevert1/copied")), true); copied = ws.copyObject(user1, ObjectIdentifier.parseObjectReference("copyrevert1/orig"), ObjectIdentifier.parseObjectReference("copyrevert1/copied")); compareObjectAndInfo(save13, copied, user1, wsid1, cp1.getName(), 5, "copied", 7); copystack = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "copied")); compareObjectAndInfo(save11, copystack.get(0), user1, wsid1, cp1.getName(), 5, "copied", 1); compareObjectAndInfo(save12, copystack.get(1), user1, wsid1, cp1.getName(), 5, "copied", 2); compareObjectAndInfo(save13, copystack.get(2), user1, wsid1, cp1.getName(), 5, "copied", 3); compareObjectAndInfo(save13, copystack.get(3), user1, wsid1, cp1.getName(), 5, "copied", 4); compareObjectAndInfo(save12, copystack.get(4), user1, wsid1, cp1.getName(), 5, "copied", 5); compareObjectAndInfo(save12, copystack.get(5), user1, wsid1, cp1.getName(), 5, "copied", 6); compareObjectAndInfo(save13, copystack.get(6), user1, wsid1, cp1.getName(), 5, "copied", 7); 
checkUnhiddenObjectCount(user1, cp1, 14, 21); failCopy(null, new ObjectIdentifier(cp1, "whooga"), new ObjectIdentifier(cp1, "hidetarget"), new InaccessibleObjectException( "Object whooga cannot be accessed: Anonymous users may not read workspace copyrevert1")); failRevert(null, new ObjectIdentifier(cp1, "whooga"), new InaccessibleObjectException( "Object whooga cannot be accessed: Anonymous users may not write to workspace copyrevert1")); failCopy(user1, new ObjectIdentifier(cp1, "foo"), new ObjectIdentifier(cp1, "bar"), new NoSuchObjectException( "No object with name foo exists in workspace " + wsid1)); failRevert(user1, new ObjectIdentifier(cp1, "foo"), new NoSuchObjectException( "No object with name foo exists in workspace " + wsid1)); failRevert(user1, new ObjectIdentifier(cp1, "orig", 4), new NoSuchObjectException( "No object with id 2 (name orig) and version 4 exists in workspace " + wsid1)); failCopy(user1, new ObjectIdentifier(cp1, "orig"), new ObjectIdentifier(cp1, 7), new NoSuchObjectException( "Copy destination is specified as object id 7 in workspace " + wsid1 + " which does not exist.")); ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(cp1, "copied")), true); failCopy(user1, new ObjectIdentifier(cp1, "copied"), new ObjectIdentifier(cp1, "hidetarget"), new NoSuchObjectException( "Object 5 (name copied) in workspace " + wsid1 + " has been deleted")); failRevert(user1, new ObjectIdentifier(cp1, "copied"), new NoSuchObjectException( "Object 5 (name copied) in workspace " + wsid1 + " has been deleted")); //now works // failCopy(user1, new ObjectIdentifier(cp1, "orig"), // new ObjectIdentifier(cp1, "copied"), new NoSuchObjectException( // "Object 5 (name copied) in workspace " + wsid1 + " has been deleted")); cp2LastDate = ws.getWorkspaceInformation(user1, cp2).getModDate(); ws.copyObject(user1, new ObjectIdentifier(cp1, "orig"), new ObjectIdentifier(cp2, "foo")); //should work cp2LastDate = assertWorkspaceDateUpdated(user1, cp2, cp2LastDate, "ws date updated on copy"); ws.setWorkspaceDeleted(user2, cp2, true); failCopy(user1, new ObjectIdentifier(cp1, "orig"), new ObjectIdentifier(cp2, "foo1"), new InaccessibleObjectException("Object foo1 cannot be accessed: Workspace copyrevert2 is deleted")); failCopy(user1, new ObjectIdentifier(cp2, "foo"), new ObjectIdentifier(cp2, "foo1"), new InaccessibleObjectException("Object foo cannot be accessed: Workspace copyrevert2 is deleted")); failRevert(user1, new ObjectIdentifier(cp2, "foo"), new InaccessibleObjectException("Object foo cannot be accessed: Workspace copyrevert2 is deleted")); ws.setWorkspaceDeleted(user2, cp2, false); ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.READ); ws.copyObject(user1, new ObjectIdentifier(cp2, "foo"), new ObjectIdentifier(cp1, "foo")); //should work failCopy(user1, new ObjectIdentifier(cp1, "foo"), new ObjectIdentifier(cp2, "foo"), new InaccessibleObjectException("Object foo cannot be accessed: User foo may not write to workspace copyrevert2")); failRevert(user1, new ObjectIdentifier(cp2, "foo", 1), new InaccessibleObjectException("Object foo cannot be accessed: User foo may not write to workspace copyrevert2")); ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.NONE); failCopy(user1, new ObjectIdentifier(cp2, "foo"), new ObjectIdentifier(cp1, "foo"), new InaccessibleObjectException("Object foo cannot be accessed: User foo may not read workspace copyrevert2")); ws.setPermissions(user2, cp2, Arrays.asList(user1), Permission.WRITE); ws.lockWorkspace(user2, cp2); 
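// A locked workspace may no longer be modified, so both copying into it and
// reverting an object it contains must fail as inaccessible, even for a user
// who holds write permission.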
		failCopy(user1, new ObjectIdentifier(cp1, "orig"),
				new ObjectIdentifier(cp2, "foo2"), new InaccessibleObjectException(
						"Object foo2 cannot be accessed: The workspace with id " + wsid2 +
						", name copyrevert2, is locked and may not be modified"));
		failRevert(user1, new ObjectIdentifier(cp2, "foo1", 1),
				new InaccessibleObjectException(
						"Object foo1 cannot be accessed: The workspace with id " + wsid2 +
						", name copyrevert2, is locked and may not be modified"));
	}
	
	// counts the listed objects with and without the showHidden flag
	private void checkUnhiddenObjectCount(WorkspaceUser user,
			WorkspaceIdentifier wsi, int unhidden, int all)
			throws Exception {
		ListObjectsParameters lop = new ListObjectsParameters(
				user, Arrays.asList(wsi))
				.withShowAllVersions(true);
		List<ObjectInformation> objs = ws.listObjects(lop);
		assertThat("orig objects hidden", objs.size(), is(unhidden));
		lop.withShowHidden(true);
		objs = ws.listObjects(lop);
		assertThat("all objects listed when showing hidden", objs.size(), is(all));
	}
	
	@Test
	public void copyReferenceVisibility() throws Exception {
		WorkspaceUser user1 = new WorkspaceUser("foo");
		WorkspaceUser user2 = new WorkspaceUser("foo2");
		WorkspaceIdentifier wsiSource1 = new WorkspaceIdentifier("copyRefVisSource1");
		WorkspaceIdentifier wsiSource2 = new WorkspaceIdentifier("copyRefVisSource2");
		WorkspaceIdentifier wsiCopied = new WorkspaceIdentifier("copyRefVisCopied");
		long wsid1 = ws.createWorkspace(user1, wsiSource1.getName(), false, null, null).getId();
		ws.setPermissions(user1, wsiSource1, Arrays.asList(user2), Permission.READ);
		long wsid2 = ws.createWorkspace(user1, wsiSource2.getName(), false, null, null).getId();
		ws.setPermissions(user1, wsiSource2, Arrays.asList(user2), Permission.READ);
		ws.createWorkspace(user2, wsiCopied.getName(), false, null, null);
		Provenance emptyprov1 = new Provenance(user1);
		Provenance emptyprov2 = new Provenance(user2);
		List<WorkspaceSaveObject> data = new LinkedList<WorkspaceSaveObject>();
		data.add(new WorkspaceSaveObject(new HashMap<String, Object>(),
				SAFE_TYPE1, null, emptyprov1, false));
		ws.saveObjects(user1, wsiSource1, data, new IdReferenceHandlerSetFactory(0));
		ws.saveObjects(user1, wsiSource2, data, new IdReferenceHandlerSetFactory(0));
		final ObjectIdentifier source1 = new ObjectIdentifier(wsiSource1, 1);
		final ObjectIdentifier source2 = new ObjectIdentifier(wsiSource2, 1);
		final ObjectIdentifier copied1 = new ObjectIdentifier(wsiCopied, "foo");
		final ObjectIdentifier copied2 = new ObjectIdentifier(wsiCopied, "foo1");
		ws.copyObject(user2, source1, copied1);
		ws.copyObject(user2, source2, copied2);
		// object 3 is saved directly, not copied, so it has no copy reference
		ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(0));
		final ObjectIdentifier nocopy = new ObjectIdentifier(wsiCopied, 3L);
		data.clear();
		Map<String, Object> ref = new HashMap<String, Object>();
		ref.put("refs", Arrays.asList(wsiCopied.getName() + "/foo"));
		data.add(new WorkspaceSaveObject(ref, REF_TYPE, null, emptyprov2, false));
		ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(1));
		ObjectChain copyoc1 = new ObjectChain(new ObjectIdentifier(wsiCopied, 4L),
				Arrays.asList(copied1));
		ref.put("refs", Arrays.asList(wsiCopied.getName() + "/foo1"));
		ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(1));
		ObjectChain copyoc2 = new ObjectChain(new ObjectIdentifier(wsiCopied, 5L),
				Arrays.asList(copied2));
		ref.put("refs", Arrays.asList(wsiCopied.getName() + "/3"));
		ws.saveObjects(user2, wsiCopied, data, new IdReferenceHandlerSetFactory(1));
		ObjectChain nocopyoc = new ObjectChain(new ObjectIdentifier(wsiCopied, 6L),
				Arrays.asList(nocopy));
		final TestReference expectedRef1 =
new TestReference(wsid1, 1, 1); final TestReference expectedRef2 = new TestReference(wsid2, 1, 1); List<ObjectIdentifier> testobjs = Arrays.asList(copied1, nocopy, copied2); List<ObjectChain> testocs = Arrays.asList(copyoc1, nocopyoc, copyoc2); List<TestReference> refnullref = Arrays.asList( expectedRef1, (TestReference) null, expectedRef2); List<TestReference> nullnullref = Arrays.asList( (TestReference) null, (TestReference) null, expectedRef2); List<TestReference> refnullnull = Arrays.asList( expectedRef1, (TestReference) null, (TestReference) null); List<Boolean> fff = Arrays.asList(false, false, false); List<Boolean> tff = Arrays.asList(true, false, false); List<Boolean> fft = Arrays.asList(false, false, true); checkCopyReference(user2, testobjs, testocs, refnullref, fff); //check 1st ref ws.setPermissions(user1, wsiSource1, Arrays.asList(user2), Permission.NONE); checkCopyReference(user2, testobjs, testocs, nullnullref, tff); ws.setPermissions(user1, wsiSource1, Arrays.asList(user2), Permission.READ); checkCopyReference(user2, testobjs, testocs, refnullref, fff); ws.setObjectsDeleted(user1, Arrays.asList(source1), true); checkCopyReference(user2, testobjs, testocs, nullnullref, tff); ws.setObjectsDeleted(user1, Arrays.asList(source1), false); checkCopyReference(user2, testobjs, testocs, refnullref, fff); ws.setWorkspaceDeleted(user1, wsiSource1, true); checkCopyReference(user2, testobjs, testocs, nullnullref, tff); ws.setWorkspaceDeleted(user1, wsiSource1, false); checkCopyReference(user2, testobjs, testocs, refnullref, fff); //check 2nd ref ws.setPermissions(user1, wsiSource2, Arrays.asList(user2), Permission.NONE); checkCopyReference(user2, testobjs, testocs, refnullnull, fft); ws.setPermissions(user1, wsiSource2, Arrays.asList(user2), Permission.READ); checkCopyReference(user2, testobjs, testocs, refnullref, fff); ws.setObjectsDeleted(user1, Arrays.asList(source2), true); checkCopyReference(user2, testobjs, testocs, refnullnull, fft); ws.setObjectsDeleted(user1, Arrays.asList(source2), false); checkCopyReference(user2, testobjs, testocs, refnullref, fff); ws.setWorkspaceDeleted(user1, wsiSource2, true); checkCopyReference(user2, testobjs, testocs, refnullnull, fft); ws.setWorkspaceDeleted(user1, wsiSource2, false); checkCopyReference(user2, testobjs, testocs, refnullref, fff); } private void checkCopyReference(WorkspaceUser user, List<ObjectIdentifier> testobjs, List<ObjectChain> testocs, List<TestReference> testRef, List<Boolean> copyAccessible) throws Exception { List<List<WorkspaceObjectInformation>> infos = new LinkedList<List<WorkspaceObjectInformation>>(); infos.add(ws.getObjectProvenance(user, testobjs)); infos.add(fromObjectData(ws.getObjects(user, testobjs))); infos.add(fromObjectData(ws.getObjectsSubSet(user, objIDToSubObjID(testobjs)))); infos.add(fromObjectData(ws.getReferencedObjects(user, testocs))); for (List<WorkspaceObjectInformation> info: infos) { for (int i = 0; i < info.size(); i++) { WorkspaceObjectInformation inf = info.get(i); assertThat("correct reference ", inf.getCopyReference() == null ? 
						null : new TestReference(inf.getCopyReference()),
						is(testRef.get(i)));
				assertThat("correct inaccessibility",
						inf.isCopySourceInaccessible(), is(copyAccessible.get(i)));
			}
		}
	}
	
	private List<WorkspaceObjectInformation> fromObjectData(
			List<WorkspaceObjectData> data) {
		List<WorkspaceObjectInformation> ret =
				new LinkedList<WorkspaceObjectInformation>();
		for (WorkspaceObjectData d: data) {
			ret.add((WorkspaceObjectInformation) d);
		}
		return ret;
	}
	
	@Test
	public void cloneWorkspace() throws Exception {
		WorkspaceUser user1 = new WorkspaceUser("foo");
		WorkspaceUser user2 = new WorkspaceUser("bar");
		String wsrefs = "clonerefs";
		String ws1 = "clone1";
		setUpCopyWorkspaces(user1, user2, wsrefs, ws1, "cloneunused");
		WorkspaceIdentifier cp1 = new WorkspaceIdentifier(ws1);
		WorkspaceIdentifier clone1 = new WorkspaceIdentifier("newclone");
		Map<String, String> premeta = new HashMap<String, String>();
		premeta.put("clone", "workspace");
		WorkspaceUserMetadata meta = new WorkspaceUserMetadata(premeta);
		WorkspaceInformation info1 = ws.cloneWorkspace(user1, cp1, clone1.getName(),
				false, null, meta);
		checkWSInfo(clone1, user1, "newclone", 3, Permission.OWNER, false,
				info1.getId(), info1.getModDate(), "unlocked", premeta);
		assertNull("desc ok", ws.getWorkspaceDescription(user1, clone1));
		List<ObjectInformation> objs = ws.getObjectHistory(user1,
				new ObjectIdentifier(cp1, "hide"));
		ObjectInformation save11 = objs.get(0);
		ObjectInformation save12 = objs.get(1);
		ObjectInformation save13 = objs.get(2);
		objs = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "orig"));
		ObjectInformation save21 = objs.get(0);
		ObjectInformation save22 = objs.get(1);
		ObjectInformation save23 = objs.get(2);
		objs = ws.getObjectHistory(user1, new ObjectIdentifier(cp1, "hidetarget"));
		ObjectInformation save31 = objs.get(0);
		List<ObjectInformation> hideobjs = ws.getObjectHistory(user1,
				new ObjectIdentifier(clone1, "hide"));
		long id = hideobjs.get(0).getObjectId();
		compareObjectAndInfo(save11, hideobjs.get(0), user1, info1.getId(),
				clone1.getName(), id, "hide", 1);
		compareObjectAndInfo(save12, hideobjs.get(1), user1, info1.getId(),
				clone1.getName(), id, "hide", 2);
		compareObjectAndInfo(save13, hideobjs.get(2), user1, info1.getId(),
				clone1.getName(), id, "hide", 3);
		List<ObjectInformation> origobjs = ws.getObjectHistory(user1,
				new ObjectIdentifier(clone1, "orig"));
		id = origobjs.get(0).getObjectId();
		compareObjectAndInfo(save21, origobjs.get(0), user1, info1.getId(),
				clone1.getName(), id, "orig", 1);
		compareObjectAndInfo(save22, origobjs.get(1), user1, info1.getId(),
				clone1.getName(), id, "orig", 2);
		compareObjectAndInfo(save23, origobjs.get(2), user1, info1.getId(),
				clone1.getName(), id, "orig", 3);
		List<ObjectInformation> hidetarget = ws.getObjectHistory(user1,
				new ObjectIdentifier(clone1, "hidetarget"));
		id = hidetarget.get(0).getObjectId();
		compareObjectAndInfo(save31, hidetarget.get(0), user1, info1.getId(),
				clone1.getName(), id, "hidetarget", 1);
		checkUnhiddenObjectCount(user1, clone1, 3, 7);
		ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(cp1, "hide")), true);
		
		WorkspaceIdentifier clone2 = new WorkspaceIdentifier("newclone2");
		WorkspaceInformation info2 = ws.cloneWorkspace(user1, cp1, clone2.getName(),
				true, "my desc", null);
		checkWSInfo(clone2, user1, "newclone2", 2, Permission.OWNER, true,
				info2.getId(), info2.getModDate(), "unlocked", MT_META);
		assertThat("desc ok", ws.getWorkspaceDescription(user1, clone2), is("my desc"));
		origobjs = ws.getObjectHistory(user1, new ObjectIdentifier(clone2, "orig"));
		id =
origobjs.get(0).getObjectId(); compareObjectAndInfo(save21, origobjs.get(0), user1, info2.getId(), clone2.getName(), id, "orig", 1); compareObjectAndInfo(save22, origobjs.get(1), user1, info2.getId(), clone2.getName(), id, "orig", 2); compareObjectAndInfo(save23, origobjs.get(2), user1, info2.getId(), clone2.getName(), id, "orig", 3); hidetarget = ws.getObjectHistory(user1, new ObjectIdentifier(clone2, "hidetarget")); id = hidetarget.get(0).getObjectId(); compareObjectAndInfo(save31, hidetarget.get(0), user1, info2.getId(), clone2.getName(), id, "hidetarget", 1); checkUnhiddenObjectCount(user1, clone2, 3, 4); ws.setWorkspaceDeleted(user1, cp1, true); failClone(user1, cp1, "fakename", null, new NoSuchWorkspaceException("Workspace clone1 is deleted", cp1)); ws.setWorkspaceDeleted(user1, cp1, false); ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(cp1, "hide")), true); failClone(null, cp1, "fakename", null, new WorkspaceAuthorizationException("Anonymous users may not read workspace clone1")); failClone(user1, null, "fakename", null, new IllegalArgumentException("Workspace identifier cannot be null")); //workspaceIdentifier used in the workspace method to check ws names tested extensively elsewhere, so just // a couple tests here failClone(user1, cp1, "bar:fakename", null, new IllegalArgumentException( "Workspace name bar:fakename must only contain the user name foo prior to the : delimiter")); failClone(user1, cp1, "9", null, new IllegalArgumentException( "Workspace names cannot be integers: 9")); failClone(user1, cp1, "foo:9", null, new IllegalArgumentException( "Workspace names cannot be integers: foo:9")); failClone(user1, cp1, "foo:fake(name", null, new IllegalArgumentException( "Illegal character in workspace name foo:fake(name: (")); failClone(user2, cp1, "fakename", null, new WorkspaceAuthorizationException("User bar may not read workspace clone1")); failClone(user1, cp1, "newclone2", null, new PreExistingWorkspaceException( "Workspace name newclone2 is already in use")); failClone(user1, new WorkspaceIdentifier("noclone"), "fakename", null, new NoSuchWorkspaceException("No workspace with name noclone exists", cp1)); ws.lockWorkspace(user1, cp1); WorkspaceIdentifier clone3 = new WorkspaceIdentifier("newclone3"); WorkspaceInformation info3 = ws.cloneWorkspace(user1, cp1, clone3.getName(), false, "my desc2", meta); checkWSInfo(clone3, user1, "newclone3", 2, Permission.OWNER, false, info3.getId(), info3.getModDate(), "unlocked", premeta); assertThat("desc ok", ws.getWorkspaceDescription(user1, clone3), is("my desc2")); origobjs = ws.getObjectHistory(user1, new ObjectIdentifier(clone3, "orig")); id = origobjs.get(0).getObjectId(); compareObjectAndInfo(save21, origobjs.get(0), user1, info3.getId(), clone3.getName(), id, "orig", 1); compareObjectAndInfo(save22, origobjs.get(1), user1, info3.getId(), clone3.getName(), id, "orig", 2); compareObjectAndInfo(save23, origobjs.get(2), user1, info3.getId(), clone3.getName(), id, "orig", 3); hidetarget = ws.getObjectHistory(user1, new ObjectIdentifier(clone3, "hidetarget")); id = hidetarget.get(0).getObjectId(); compareObjectAndInfo(save31, hidetarget.get(0), user1, info3.getId(), clone3.getName(), id, "hidetarget", 1); checkUnhiddenObjectCount(user1, clone3, 3, 4); WorkspaceIdentifier clone4 = new WorkspaceIdentifier("newclone4"); ws.cloneWorkspace(user1, cp1, clone4.getName(), true, LONG_TEXT, null); assertThat("desc ok", ws.getWorkspaceDescription(user1, clone4), is(LONG_TEXT.subSequence(0, 1000))); //TODO BF this test should go in 
		// metadata class unit tests
		/*
		Map<String, String> bigmeta = new HashMap<String, String>();
		for (int i = 0; i < 141; i++) {
			bigmeta.put("thing" + i, TEXT100);
		}
		ws.cloneWorkspace(user1, cp1, "fakename", false, "eeswaffertheen", bigmeta);
		bigmeta.put("thing", TEXT100);
		failClone(user1, cp1, "fakename", bigmeta, new IllegalArgumentException(
				"Metadata size of 16076 is > 16000 bytes"));
		*/
		ws.setGlobalPermission(user1, clone2, Permission.NONE);
		ws.setGlobalPermission(user1, clone4, Permission.NONE);
	}
	
	@Test
	public void lockWorkspace() throws Exception {
		WorkspaceUser user = new WorkspaceUser("lockuser");
		WorkspaceUser user2 = new WorkspaceUser("lockuser2");
		WorkspaceIdentifier wsi = lockWS;
		Map<String, String> meta = new HashMap<String, String>();
		meta.put("some meta", "for u");
		long wsid = ws.createWorkspace(user, wsi.getName(), false, null,
				new WorkspaceUserMetadata(meta)).getId();
		ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject(
				new HashMap<String, String>(), SAFE_TYPE1, new WorkspaceUserMetadata(),
				new Provenance(user), false)), getIdFactory());
		ObjectIdentifier oi = new ObjectIdentifier(wsi, "auto1");
		//these should work
		WorkspaceInformation info = ws.lockWorkspace(user, wsi);
		checkWSInfo(info, user, "lock", 1, Permission.OWNER, false, "locked", meta);
		successGetObjects(user, Arrays.asList(oi));
		ws.cloneWorkspace(user, wsi, "lockclone", false, null, null);
		ws.copyObject(user, oi, new ObjectIdentifier(new WorkspaceIdentifier("lockclone"), "foo"));
		ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.WRITE);
		ws.setPermissions(user, wsi, Arrays.asList(user2), Permission.NONE);
		ws.getPermissions(user, Arrays.asList(wsi));
		ws.getWorkspaceDescription(user, wsi);
		ws.getWorkspaceInformation(user, wsi);
		ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi)));
		//these should not work
		try {
			ws.lockWorkspace(user, new WorkspaceIdentifier("nolock"));
			fail("locked nonexistent ws");
		} catch (NoSuchWorkspaceException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("No workspace with name nolock exists"));
		}
		ws.createWorkspace(user, "lock2", false, "foo", null);
		WorkspaceIdentifier wsi2 = new WorkspaceIdentifier("lock2");
		try {
			ws.lockWorkspace(null, wsi2);
			fail("locked w/o creds");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Anonymous users may not lock workspace lock2"));
		}
		try {
			ws.lockWorkspace(user2, wsi2);
			fail("locked w/o perms");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("User lockuser2 may not lock workspace lock2"));
		}
		ws.setWorkspaceDeleted(user, wsi2, true);
		try {
			ws.lockWorkspace(user, wsi2);
			fail("locked deleted ws");
		} catch (NoSuchWorkspaceException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Workspace lock2 is deleted"));
		}
		try {
			ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject(
					new HashMap<String, String>(), SAFE_TYPE1, new WorkspaceUserMetadata(),
					new Provenance(user), false)), getIdFactory());
			fail("saved to locked workspace");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("The workspace with id " + wsid +
							", name lock, is locked and may not be modified"));
		}
		try {
			ws.copyObject(user, oi, new ObjectIdentifier(wsi, "foo"));
			fail("copied to locked workspace");
		} catch (InaccessibleObjectException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Object foo cannot be accessed: The workspace with id "
							+ wsid + ", name lock, is locked and may not be modified"));
		}
		try {
			ws.revertObject(user, oi);
			fail("revert to locked workspace");
		} catch (InaccessibleObjectException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Object auto1 cannot be accessed: The workspace with id "
							+ wsid + ", name lock, is locked and may not be modified"));
		}
		try {
			ws.lockWorkspace(user, wsi);
			fail("locked locked workspace");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("The workspace with id " + wsid +
							", name lock, is locked and may not be modified"));
		}
		try {
			ws.renameObject(user, oi, "boo");
			fail("renamed locked workspace obj");
		} catch (InaccessibleObjectException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Object auto1 cannot be accessed: The workspace with id "
							+ wsid + ", name lock, is locked and may not be modified"));
		}
		try {
			ws.renameWorkspace(user, wsi, "foo");
			fail("renamed locked workspace");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("The workspace with id " + wsid +
							", name lock, is locked and may not be modified"));
		}
		try {
			ws.setObjectsDeleted(user, Arrays.asList(oi), true);
			fail("deleted locked workspace obj");
		} catch (InaccessibleObjectException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Object auto1 cannot be accessed: The workspace with id "
							+ wsid + ", name lock, is locked and may not be modified"));
		}
		try {
			ws.setObjectsHidden(user, Arrays.asList(oi), true);
			fail("hid locked workspace obj");
		} catch (InaccessibleObjectException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("Object auto1 cannot be accessed: The workspace with id "
							+ wsid + ", name lock, is locked and may not be modified"));
		}
		try {
			ws.setWorkspaceDeleted(user, wsi, true);
			fail("deleted locked workspace");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("The workspace with id " + wsid +
							", name lock, is locked and may not be modified"));
		}
		try {
			ws.setWorkspaceDescription(user, wsi, "wugga");
			fail("set desc on locked ws");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("The workspace with id " + wsid +
							", name lock, is locked and may not be modified"));
		}
		try {
			ws.getWorkspaceDescription(user2, wsi);
			fail("bad access to locked workspace");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("User lockuser2 may not read workspace lock"));
		}
		failWSMeta(user2, wsi, "some meta", "val", new WorkspaceAuthorizationException(
				"The workspace with id " + wsid +
				", name lock, is locked and may not be modified"));
		//should work
		ws.setGlobalPermission(user, wsi, Permission.READ);
		checkWSInfo(ws.getWorkspaceInformation(user, wsi),
				user, "lock", 1, Permission.OWNER, true, "published", meta);
		checkWSInfo(ws.getWorkspaceInformation(user2, wsi),
				user, "lock", 1, Permission.NONE, true, "published", meta);
		ws.getWorkspaceDescription(user2, wsi);
		//shouldn't work
		try {
			ws.setGlobalPermission(user, wsi, Permission.NONE);
			fail("removed global perm from locked workspace");
		} catch (WorkspaceAuthorizationException e) {
			assertThat("correct exception", e.getLocalizedMessage(),
					is("The workspace with id " + wsid +
							", name lock, is locked and may not be modified"));
		}
	}
	
	@Test
	public void renameObject() throws Exception {
		WorkspaceUser user = new WorkspaceUser("renameObjUser");
WorkspaceIdentifier wsi = new WorkspaceIdentifier("renameObj"); WorkspaceUser user2 = new WorkspaceUser("renameObjUser2"); WorkspaceIdentifier wsi2 = new WorkspaceIdentifier("renameObj2"); WorkspaceInformation info1 = ws.createWorkspace(user, wsi.getName(), false, null, null); long wsid1 = info1.getId(); Date lastWSDate = info1.getModDate(); ws.createWorkspace(user2, wsi2.getName(), false, null, null); ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); ws.saveObjects(user2, wsi2, Arrays.asList(new WorkspaceSaveObject( new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); lastWSDate = ws.getWorkspaceInformation(user, wsi).getModDate(); ObjectInformation info = ws.renameObject(user, new ObjectIdentifier(wsi, "auto1"), "mynewname"); assertWorkspaceDateUpdated(user, wsi, lastWSDate, "ws date updated on rename"); checkObjInfo(info, 1L, "mynewname", SAFE_TYPE1.getTypeString(), 1, user, wsid1, "renameObj", "99914b932bd37a50b983c5e7c90ae93b", 2, null); String newname = ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi))) .get(0).getObjectName(); assertThat("object renamed", newname, is("mynewname")); ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("myoldname"), new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "bad%name", new IllegalArgumentException( "Illegal character in object name bad%name: %")); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "2", new IllegalArgumentException( "Object names cannot be integers: 2")); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "myoldname", new IllegalArgumentException( "There is already an object in the workspace named myoldname")); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "mynewname", new IllegalArgumentException( "Object is already named mynewname")); failObjRename(user, new ObjectIdentifier(wsi, "bar"), "foo", new NoSuchObjectException( "No object with name bar exists in workspace " + wsid1)); failObjRename(user, new ObjectIdentifier(wsi2, "auto1"), "foo", new InaccessibleObjectException( "Object auto1 cannot be accessed: User renameObjUser may not rename objects in workspace renameObj2")); failObjRename(null, new ObjectIdentifier(wsi2, "auto1"), "foo", new InaccessibleObjectException( "Object auto1 cannot be accessed: Anonymous users may not rename objects in workspace renameObj2")); ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, "mynewname")), true); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "foo", new InaccessibleObjectException( "Object 1 (name mynewname) in workspace " + wsid1 + " has been deleted")); ws.setWorkspaceDeleted(user, wsi, true); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "foo", new InaccessibleObjectException( "Object mynewname cannot be accessed: Workspace renameObj is deleted")); ws.setWorkspaceDeleted(user, wsi, false); failObjRename(user, new ObjectIdentifier(new WorkspaceIdentifier("renameObjfake"), "mynewname"), "foo", new InaccessibleObjectException( "Object mynewname cannot be accessed: No workspace with name renameObjfake exists")); ws.lockWorkspace(user, wsi); failObjRename(user, new ObjectIdentifier(wsi, "mynewname"), "foo", new InaccessibleObjectException( "Object mynewname cannot be accessed: The workspace with id " + 
wsid1 + ", name renameObj, is locked and may not be modified")); } @Test public void renameWorkspace() throws Exception { WorkspaceUser user = new WorkspaceUser("renameWSUser"); WorkspaceUser user2 = new WorkspaceUser("renameWSUser2"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("renameWS"); WorkspaceIdentifier wsi2 = new WorkspaceIdentifier("renameWS2"); Map<String, String> meta = new HashMap<String, String>(); meta.put("?", "42"); meta.put("Panic", "towel"); WorkspaceInformation info1 = ws.createWorkspace(user, wsi.getName(), false, null, new WorkspaceUserMetadata(meta)); WorkspaceIdentifier newwsi = new WorkspaceIdentifier(user.getUser() + ":newRenameWS"); Thread.sleep(2); //make sure timestamp is different on rename WorkspaceInformation info2 = ws.renameWorkspace(user, wsi, newwsi.getName()); checkWSInfo(info2, user, newwsi.getName(), 0, Permission.OWNER, false, "unlocked", meta); assertTrue("date updated on ws rename", info2.getModDate().after(info1.getModDate())); checkWSInfo(ws.getWorkspaceInformation(user, newwsi), user, newwsi.getName(), 0, Permission.OWNER, false, "unlocked", meta); failWSRename(user, newwsi, "foo|bar", new IllegalArgumentException("Illegal character in workspace name foo|bar: |")); failWSRename(user, newwsi, "renameWSUser:9", new IllegalArgumentException("Workspace names cannot be integers: renameWSUser:9")); failWSRename(user, newwsi, "9", new IllegalArgumentException("Workspace names cannot be integers: 9")); failWSRename(user, newwsi, "foo:foobar", new IllegalArgumentException( "Workspace name foo:foobar must only contain the user name renameWSUser prior to the : delimiter")); ws.createWorkspace(user2, wsi2.getName(), false, null, null); ws.setPermissions(user2, wsi2, Arrays.asList(user), Permission.WRITE); failWSRename(user, newwsi, "renameWS2", new IllegalArgumentException("There is already a workspace named renameWS2")); failWSRename(user, newwsi, newwsi.getName(), new IllegalArgumentException("Workspace is already named renameWSUser:newRenameWS")); failWSRename(user, new WorkspaceIdentifier(newwsi.getName() + "a"), newwsi.getName(), new NoSuchWorkspaceException("No workspace with name renameWSUser:newRenameWSa exists", wsi)); failWSRename(user, wsi2, newwsi.getName(), new WorkspaceAuthorizationException("User renameWSUser may not rename workspace renameWS2")); failWSRename(null, newwsi, "renamefoo", new WorkspaceAuthorizationException("Anonymous users may not rename workspace renameWSUser:newRenameWS")); ws.setWorkspaceDeleted(user, newwsi, true); failWSRename(user, newwsi, "renamefoo", new NoSuchWorkspaceException("Workspace " + newwsi.getName() + " is deleted", newwsi)); ws.setWorkspaceDeleted(user, newwsi, false); ws.lockWorkspace(user, newwsi); failWSRename(user, newwsi, "renamefoo", new WorkspaceAuthorizationException("The workspace with id " + info1.getId() + ", name " + newwsi.getName() + ", is locked and may not be modified")); } @Test public void setGlobalRead() throws Exception { WorkspaceUser user = new WorkspaceUser("setGlobalUser"); WorkspaceUser user2 = new WorkspaceUser("setGlobalUser2"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("global"); long wsid = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); failGetWorkspaceDesc(user2, wsi, new WorkspaceAuthorizationException( "User setGlobalUser2 may not read workspace global")); ws.setGlobalPermission(user, wsi, Permission.READ); assertThat("read set correctly", ws.getPermissions(user, Arrays.asList(wsi)).get(0).get(new AllUsers('*')), is(Permission.READ)); 
ws.getWorkspaceDescription(user2, wsi); failSetGlobalPerm(user, null, Permission.READ, new IllegalArgumentException( "Workspace identifier cannot be null")); failSetGlobalPerm(user, wsi, Permission.WRITE, new IllegalArgumentException( "Global permissions cannot be greater than read")); failSetGlobalPerm(user2, wsi, Permission.NONE, new WorkspaceAuthorizationException( "User setGlobalUser2 may not set global permission on workspace global")); failSetGlobalPerm(null, wsi, Permission.NONE, new WorkspaceAuthorizationException( "Anonymous users may not set global permission on workspace global")); ws.setWorkspaceDeleted(user, wsi, true); failSetGlobalPerm(user, wsi, Permission.NONE, new NoSuchWorkspaceException( "Workspace global is deleted", wsi)); ws.setWorkspaceDeleted(user, wsi, false); ws.setGlobalPermission(user, wsi, Permission.NONE); ws.lockWorkspace(user, wsi); failSetGlobalPerm(user, wsi, Permission.NONE, new WorkspaceAuthorizationException( "The workspace with id " + wsid + ", name global, is locked and may not be modified")); //this is tested in lockWorkspace // ws.setGlobalPermission(user, wsi, Permission.READ); // assertThat("read set correctly on locked ws", ws.getPermissions(user, wsi).get(new AllUsers('*')), // is(Permission.READ)); } @Test public void hiddenObjects() throws Exception { WorkspaceUser user = new WorkspaceUser("hideObjUser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("hideObj"); WorkspaceUser user2 = new WorkspaceUser("hideObjUser2"); long wsid1 = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); ObjectInformation auto1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation auto2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), true)), getIdFactory()).get(0); ObjectInformation obj1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("obj1"), new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), true)), getIdFactory()).get(0); List<ObjectInformation> expected = new ArrayList<ObjectInformation>(); expected.add(auto1); ListObjectsParameters lop = new ListObjectsParameters(user, Arrays.asList(wsi)) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), expected); expected.add(auto2); expected.add(obj1); compareObjectInfo(ws.listObjects(lop.withShowHidden(true)), expected); ws.setObjectsHidden(user, Arrays.asList(new ObjectIdentifier(wsi, 3), new ObjectIdentifier(wsi, "auto2")), false); compareObjectInfo(ws.listObjects(lop.withShowHidden(false)), expected); ws.setObjectsHidden(user, Arrays.asList(new ObjectIdentifier(wsi, 1), new ObjectIdentifier(wsi, "obj1")), true); expected.remove(auto1); expected.remove(obj1); compareObjectInfo(ws.listObjects(lop), expected); failSetHide(user, new ObjectIdentifier(wsi, "fake"), true, new NoSuchObjectException( "No object with name fake exists in workspace " + wsid1)); failSetHide(user, new ObjectIdentifier(new WorkspaceIdentifier("fake"), "fake"), true, new InaccessibleObjectException( "Object fake cannot be accessed: No workspace with name fake exists")); failSetHide(user2, new ObjectIdentifier(wsi, "auto1"), true, new InaccessibleObjectException( "Object auto1 cannot be accessed: User hideObjUser2 may not hide objects from workspace hideObj")); failSetHide(null, new ObjectIdentifier(wsi, "auto1"), true, new 
InaccessibleObjectException( "Object auto1 cannot be accessed: Anonymous users may not hide objects from workspace hideObj")); ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, 3)), true); failSetHide(user, new ObjectIdentifier(wsi, 3), true, new NoSuchObjectException( "Object 3 (name obj1) in workspace " + wsid1 + " has been deleted")); ws.setObjectsDeleted(user, Arrays.asList(new ObjectIdentifier(wsi, 3)), false); ws.setWorkspaceDeleted(user, wsi, true); failSetHide(user, new ObjectIdentifier(new WorkspaceIdentifier("fake"), "fake"), true, new InaccessibleObjectException( "Object fake cannot be accessed: No workspace with name fake exists")); ws.setWorkspaceDeleted(user, wsi, false); ws.lockWorkspace(user, wsi); failSetHide(user, new ObjectIdentifier(wsi, 3), true, new InaccessibleObjectException( "Object 3 cannot be accessed: The workspace with id " + wsid1 + ", name hideObj, is locked and may not be modified")); } @Test public void listWorkspaces() throws Exception { WorkspaceUser user = new WorkspaceUser("listUser"); WorkspaceUser user2 = new WorkspaceUser("listUser2"); WorkspaceUser user3 = new WorkspaceUser("listUser3"); Map<String, String> premeta1 = new HashMap<String, String>(); premeta1.put("this is", "some meta meta"); premeta1.put("bro", "heim"); WorkspaceUserMetadata meta1 = new WorkspaceUserMetadata(premeta1); Map<String, String> premeta2 = new HashMap<String, String>(); premeta2.put("suckmaster", "burstingfoam"); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(premeta2); WorkspaceInformation stdws = ws.createWorkspace(user, "stdws", false, null, meta1); WorkspaceInformation globalws = ws.createWorkspace(user, "globalws", true, null, meta2); WorkspaceInformation deletedws = ws.createWorkspace(user, "deletedws", false, null, null); ws.setWorkspaceDeleted(user, new WorkspaceIdentifier("deletedws"), true); ws.createWorkspace(user2, "readable", false, null, meta1); ws.setPermissions(user2, new WorkspaceIdentifier("readable"), Arrays.asList(user), Permission.READ); WorkspaceInformation readable = ws.getWorkspaceInformation(user, new WorkspaceIdentifier("readable")); ws.createWorkspace(user2, "writeable", false, null, meta2); ws.setPermissions(user2, new WorkspaceIdentifier("writeable"), Arrays.asList(user), Permission.WRITE); WorkspaceInformation writeable = ws.getWorkspaceInformation(user, new WorkspaceIdentifier("writeable")); ws.createWorkspace(user2, "adminable", false, null, null); ws.setPermissions(user2, new WorkspaceIdentifier("adminable"), Arrays.asList(user), Permission.ADMIN); WorkspaceInformation adminable = ws.getWorkspaceInformation(user, new WorkspaceIdentifier("adminable")); @SuppressWarnings("unused") WorkspaceInformation delreadable = ws.createWorkspace(user2, "delreadable", false, null, meta1); ws.setPermissions(user2, new WorkspaceIdentifier("delreadable"), Arrays.asList(user), Permission.READ); ws.setWorkspaceDeleted(user2, new WorkspaceIdentifier("delreadable"), true); ws.createWorkspace(user2, "globalreadable", true, null, meta2); WorkspaceInformation globalreadable = ws.getWorkspaceInformation(user, new WorkspaceIdentifier("globalreadable")); @SuppressWarnings("unused") WorkspaceInformation deletedglobalreadable = ws.createWorkspace(user2, "deletedglobalreadable", true, null, null); ws.setWorkspaceDeleted(user2, new WorkspaceIdentifier("deletedglobalreadable"), true); @SuppressWarnings("unused") WorkspaceInformation unreadable = ws.createWorkspace(user2, "unreadable", false, null, meta1); ws.createWorkspace(user3, "listuser3ws", 
false, null, null); ws.setPermissions(user3, new WorkspaceIdentifier("listuser3ws"), Arrays.asList(user), Permission.READ); WorkspaceInformation listuser3 = ws.getWorkspaceInformation(user, new WorkspaceIdentifier("listuser3ws")); ws.createWorkspace(user3, "listuser3glws", true, null, meta2); WorkspaceInformation listuser3gl = ws.getWorkspaceInformation(user, new WorkspaceIdentifier("listuser3glws")); Map<WorkspaceInformation, Boolean> expected = new HashMap<WorkspaceInformation, Boolean>(); expected.put(stdws, false); expected.put(globalws, false); expected.put(readable, false); expected.put(writeable, false); expected.put(adminable, false); expected.put(listuser3, false); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, true, false, false), expected); checkWSInfoList(ws.listWorkspaces(user, null, null, new WorkspaceUserMetadata(MT_META), null, null, true, false, false), expected); expected.put(globalreadable, false); expected.put(listuser3gl, false); WorkspaceInformation locked = null; try { locked = ws.getWorkspaceInformation(user, lockWS); } catch (NoSuchWorkspaceException nswe) { //ignore - means that the locking ws test has not been run yet } if (locked != null) { expected.put(locked, false); } checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, false, false, false), expected); expected.put(deletedws, true); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, false, true, false), expected); expected.remove(globalreadable); expected.remove(locked); expected.remove(listuser3gl); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, true, true, false), expected); checkWSInfoList(ws.listWorkspaces(user, Permission.NONE, null, null, null, null, true, true, false), expected); checkWSInfoList(ws.listWorkspaces(user, Permission.READ, null, null, null, null, true, true, false), expected); expected.remove(readable); expected.remove(listuser3); checkWSInfoList(ws.listWorkspaces(user, Permission.WRITE, null, null, null, null, true, true, false), expected); expected.remove(writeable); checkWSInfoList(ws.listWorkspaces(user, Permission.ADMIN, null, null, null, null, true, true, false), expected); expected.clear(); expected.put(globalreadable, false); expected.put(listuser3gl, false); if (locked != null) { expected.put(locked, false); } WorkspaceUser newb = new WorkspaceUser("listUserAZillion"); expected.put(ws.getWorkspaceInformation(newb, new WorkspaceIdentifier("globalws")), false); checkWSInfoList(ws.listWorkspaces(newb, null, null, null, null, null, false, false, false), expected); expected.clear(); checkWSInfoList(ws.listWorkspaces(newb, null, null, null, null, null, false, false, true), expected); checkWSInfoList(ws.listWorkspaces(newb, null, null, null, null, null, true, false, false), expected); expected.put(deletedws, true); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, false, false, true), expected); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, false, true, true), expected); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, true, true, true), expected); checkWSInfoList(ws.listWorkspaces(user, null, null, null, null, null, false, false, true), expected); expected.clear(); expected.put(stdws, false); expected.put(globalws, false); checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user), null, null, null, false, false, false), expected); expected.put(readable, false); expected.put(writeable, false); expected.put(adminable, false); 
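		// Workspaces owned by user2 appear once user2 is added to the owner filter.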
expected.put(globalreadable, false); checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user, user2), null, null, null, false, false, false), expected); expected.put(listuser3, false); expected.put(listuser3gl, false); checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user, user2, user3), null, null, null, false, false, false), expected); expected.remove(globalreadable); expected.remove(listuser3gl); checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user, user2, user3), null, null, null, true, false, false), expected); expected.remove(stdws); expected.remove(globalws); checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user2, user3), null, null, null, true, false, false), expected); expected.remove(readable); expected.remove(writeable); expected.remove(adminable); checkWSInfoList(ws.listWorkspaces(user, null, Arrays.asList(user3), null, null, null, true, false, false), expected); Map<String, String> querymeta = new HashMap<String, String>(); querymeta.put("suckmaster", "burstingfoam"); expected.clear(); expected.put(globalws, false); expected.put(writeable, false); expected.put(globalreadable, false); expected.put(listuser3gl, false); checkWSInfoList(ws.listWorkspaces(user, null, null, new WorkspaceUserMetadata(querymeta), null, null, false, false, false), expected); querymeta.clear(); querymeta.put("this is", "some meta meta"); expected.clear(); expected.put(stdws, false); expected.put(readable, false); checkWSInfoList(ws.listWorkspaces(user, null, null, new WorkspaceUserMetadata(querymeta), null, null, false, false, false), expected); querymeta.clear(); querymeta.put("bro", "heim"); checkWSInfoList(ws.listWorkspaces(user, null, null, new WorkspaceUserMetadata(querymeta), null, null, false, false, false), expected); try { ws.listWorkspaces(user, null, null, meta1, null, null, false, false, false); fail("listed ws with bad meta"); } catch (IllegalArgumentException exp) { assertThat("correct exception", exp.getLocalizedMessage(), is("Only one metadata spec allowed")); } ws.setGlobalPermission(user2, new WorkspaceIdentifier("globalreadable"), Permission.NONE); ws.setWorkspaceDeleted(user2, new WorkspaceIdentifier("deletedglobalreadable"), false); ws.setGlobalPermission(user2, new WorkspaceIdentifier("deletedglobalreadable"), Permission.NONE); ws.setGlobalPermission(user, new WorkspaceIdentifier("globalws"), Permission.NONE); ws.setGlobalPermission(user3, new WorkspaceIdentifier("listuser3glws"), Permission.NONE); } @Test public void listWorkspacesByDate() throws Exception { WorkspaceUser u = new WorkspaceUser("listwsbydate"); WorkspaceInformation i1 = ws.createWorkspace(u, "listwsbydate1", false, null, null); Thread.sleep(100); WorkspaceInformation i2 = ws.createWorkspace(u, "listwsbydate2", false, null, null); Thread.sleep(100); WorkspaceInformation i3 = ws.createWorkspace(u, "listwsbydate3", false, null, null); Thread.sleep(100); WorkspaceInformation i4 = ws.createWorkspace(u, "listwsbydate4", false, null, null); Thread.sleep(100); WorkspaceInformation i5 = ws.createWorkspace(u, "listwsbydate5", false, null, null); Date beforeall = new Date(i1.getModDate().getTime() - 1); Date afterall = new Date(i5.getModDate().getTime() + 1); checkWSInfoList(ws.listWorkspaces(u, null, null, null, null, null, true, false, false), Arrays.asList(i1, i2, i3, i4, i5)); checkWSInfoList(ws.listWorkspaces(u, null, null, null, beforeall, afterall, true, false, false), Arrays.asList(i1, i2, i3, i4, i5)); checkWSInfoList(ws.listWorkspaces(u, null, null, null, afterall, 
beforeall, true, false, false), new ArrayList<WorkspaceInformation>()); checkWSInfoList(ws.listWorkspaces(u, null, null, null, i3.getModDate(), i4.getModDate(), true, false, false), new ArrayList<WorkspaceInformation>()); checkWSInfoList(ws.listWorkspaces(u, null, null, null, i2.getModDate(), i4.getModDate(), true, false, false), Arrays.asList(i3)); checkWSInfoList(ws.listWorkspaces(u, null, null, null, i2.getModDate(), null, true, false, false), Arrays.asList(i3, i4, i5)); checkWSInfoList(ws.listWorkspaces(u, null, null, null, null, i4.getModDate(), true, false, false), Arrays.asList(i1, i2, i3)); checkWSInfoList(ws.listWorkspaces(u, null, null, null, new Date(i2.getModDate().getTime() - 1), i5.getModDate(), true, false, false), Arrays.asList(i2, i3, i4)); } @Test public void listObjectsWithDeletedObjects() throws Exception { /* Test that deleted objects only show up in the objects list when * requested *and* when the user has permission to write to the * workspace, which is required for listing deleted objects. */ WorkspaceUser u1 = new WorkspaceUser("listObjDelUser1"); WorkspaceUser u2 = new WorkspaceUser("listObjDelUser2"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObjDel"); ws.createWorkspace(u1, wsi.getName(), false, null, null); ws.setPermissions(u1, wsi, Arrays.asList(u2), Permission.READ); ObjectInformation std = ws.saveObjects(u1, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("std"), new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(u1), false)), getIdFactory()).get(0); ObjectInformation del = ws.saveObjects(u1, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("del"), new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(u1), false)), getIdFactory()).get(0); ws.setObjectsDeleted(u1, Arrays.asList(new ObjectIdentifier(wsi, "del")), true); ListObjectsParameters lop = new ListObjectsParameters(u1, Arrays.asList(wsi)) .withIncludeMetaData(true); //test user1 - owner. Should always see deleted if requested. compareObjectInfo(ws.listObjects(lop), Arrays.asList(std)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std, del)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)), Arrays.asList(del)); lop = new ListObjectsParameters(u2, Arrays.asList(wsi)) .withIncludeMetaData(true); //test user2 with only read perms. Should never see deleted objects. compareObjectInfo(ws.listObjects(lop), Arrays.asList(std)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)), new LinkedList<ObjectInformation>()); //test user2 with write perms. Should always see deleted if requested. ws.setPermissions(u1, wsi, Arrays.asList(u2), Permission.WRITE); compareObjectInfo(ws.listObjects(lop.withShowOnlyDeleted(false)), Arrays.asList(std)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std, del)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)), Arrays.asList(del)); } @Test public void listObjectsWithDeletedWorkspace() throws Exception { /* Test that objects from a deleted workspace don't show up in * listObjects output. 
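	 * Even requesting deleted objects should not expose them, since the
	 * containing workspace itself is deleted.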
*/ WorkspaceUser u1 = new WorkspaceUser("listObjDelWSUser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObjDelWS"); WorkspaceIdentifier wsdel = new WorkspaceIdentifier("listObjDelWS_Deleted"); ws.createWorkspace(u1, wsi.getName(), false, null, null); ws.createWorkspace(u1, wsdel.getName(), false, null, null); Map<String, String> pmeta = new HashMap<String, String>(); pmeta.put("test", "listObjDelWS"); WorkspaceUserMetadata meta = new WorkspaceUserMetadata(pmeta); ObjectInformation std = ws.saveObjects(u1, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("std"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(u1), false)), getIdFactory()).get(0); ws.saveObjects(u1, wsdel, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("del"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(u1), false)), getIdFactory()).get(0); ws.setWorkspaceDeleted(u1, wsdel, true); ListObjectsParameters lop = new ListObjectsParameters(u1, SAFE_TYPE1) .withMetadata(meta).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(true)), Arrays.asList(std)); compareObjectInfo(ws.listObjects(lop.withShowDeleted(false).withShowOnlyDeleted(true)), new LinkedList<ObjectInformation>()); } @Test public void listObjectsAndHistory() throws Exception { WorkspaceUser user = new WorkspaceUser("listObjUser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObj1"); WorkspaceIdentifier readable = new WorkspaceIdentifier("listObjread"); WorkspaceIdentifier writeable = new WorkspaceIdentifier("listObjwrite"); WorkspaceIdentifier adminable = new WorkspaceIdentifier("listObjadmin"); WorkspaceIdentifier thirdparty = new WorkspaceIdentifier("thirdparty"); WorkspaceUser user2 = new WorkspaceUser("listObjUser2"); long wsid1 = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); ws.createWorkspace(user2, readable.getName(), false, null, null).getId(); ws.setPermissions(user2, readable, Arrays.asList(user), Permission.READ); long wsidwrite = ws.createWorkspace(user2, writeable.getName(), false, null, null).getId(); ws.setPermissions(user2, writeable, Arrays.asList(user), Permission.WRITE); ws.createWorkspace(user2, adminable.getName(), false, null, null).getId(); ws.setPermissions(user2, adminable, Arrays.asList(user), Permission.ADMIN); WorkspaceUser user3 = new WorkspaceUser("listObjUser3"); ws.createWorkspace(user3, thirdparty.getName(), true, null, null).getId(); Map<String, String> pmeta = new HashMap<String, String>(); pmeta.put("meta1", "1"); Map<String, String> pmeta2 = new HashMap<String, String>(); pmeta2.put("meta2", "2"); Map<String, String> pmeta3 = new HashMap<String, String>(); pmeta3.put("meta3", "3"); Map<String, String> pmeta32 = new HashMap<String, String>(); pmeta32.put("meta3", "3"); pmeta32.put("meta2", "2"); WorkspaceUserMetadata meta = new WorkspaceUserMetadata(pmeta); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(pmeta2); WorkspaceUserMetadata meta3 = new WorkspaceUserMetadata(pmeta3); WorkspaceUserMetadata meta32 = new WorkspaceUserMetadata(pmeta32); Map<String, Object> passTCdata = new HashMap<String, Object>(); passTCdata.put("thing", "athing"); ObjectInformation std = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("std"), new HashMap<String, String>(), SAFE_TYPE1, null, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation stdnometa = ws.getObjectInformation(user, 
Arrays.asList(new ObjectIdentifier(wsi, "std")), false, false).get(0); ObjectInformation objstack1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("objstack"), new HashMap<String, String>(), SAFE_TYPE1_10, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation objstack1nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "objstack", 1)), false, false).get(0); ObjectInformation objstack2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("objstack"), passTCdata, SAFE_TYPE1_20, meta2, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation objstack2nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "objstack", 2)), false, false).get(0); ObjectInformation type2_1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), new HashMap<String, String>(), SAFE_TYPE2, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_1nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 1)), false, false).get(0); ObjectInformation type2_2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), new HashMap<String, String>(), SAFE_TYPE2_10, meta2, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_2nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 2)), false, false).get(0); ObjectInformation type2_3 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), passTCdata, SAFE_TYPE2_20, meta32, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_3nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 3)), false, false).get(0); ObjectInformation type2_4 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("type2"), passTCdata, SAFE_TYPE2_21, meta3, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation type2_4nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, "type2", 4)), false, false).get(0); ObjectInformation stdws2 = ws.saveObjects(user2, writeable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("stdws2"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation stdws2nometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(writeable, "stdws2")), false, false).get(0); ObjectInformation hidden = ws.saveObjects(user, writeable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("hidden"), new HashMap<String, String>(), SAFE_TYPE1, meta2, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation hiddennometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(writeable, "hidden")), false, false).get(0); ws.setObjectsHidden(user, Arrays.asList(new ObjectIdentifier(writeable, "hidden")), true); ObjectInformation deleted = ws.saveObjects(user2, writeable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("deleted"), new HashMap<String, String>(), SAFE_TYPE1, meta32, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation deletednometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(writeable, "deleted")), false, false).get(0); ws.setObjectsDeleted(user, Arrays.asList(new 
ObjectIdentifier(writeable, "deleted")), true); ObjectInformation readobj = ws.saveObjects(user2, readable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("readobj"), new HashMap<String, String>(), SAFE_TYPE1, meta3, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation readobjnometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(readable, "readobj")), false, false).get(0); ObjectInformation adminobj = ws.saveObjects(user2, adminable, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("adminobj"), new HashMap<String, String>(), SAFE_TYPE1, meta3, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation adminobjnometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(adminable, "adminobj")), false, false).get(0); ObjectInformation thirdobj = ws.saveObjects(user3, thirdparty, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("thirdobj"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()).get(0); ObjectInformation thirdobjnometa = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(thirdparty, "thirdobj")), false, false).get(0); //this should be invisible to anyone except user3 ws.saveObjects(user3, thirdparty, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("thirdobjdel"), new HashMap<String, String>(), SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()).get(0); ws.setObjectsDeleted(user3, Arrays.asList(new ObjectIdentifier(thirdparty, "thirdobjdel")), true); ObjectInformation lock = null; ObjectInformation locknometa = null; try { ListObjectsParameters lop = new ListObjectsParameters(user, Arrays.asList(lockWS)) .withIncludeMetaData(true); List<ObjectInformation> foo = ws.listObjects(lop); if (foo.size() > 1) { fail("found more than one object in the locked workspace, this is unexpected"); } if (foo.size() == 1) { lock = foo.get(0); locknometa = ws.listObjects(lop.withIncludeMetaData(false)).get(0); } } catch (NoSuchWorkspaceException nswe) { //do nothing, lock workspace wasn't created yet } TypeDefId allType1 = new TypeDefId(SAFE_TYPE1.getType().getTypeString()); TypeDefId allType2 = new TypeDefId(SAFE_TYPE2.getType().getTypeString()); //test with anon user ListObjectsParameters lop = new ListObjectsParameters(null, SAFE_TYPE1) .withShowDeleted(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), setUpListObjectsExpected(Arrays.asList(thirdobj), lock)); compareObjectInfo(ws.listObjects(lop.withExcludeGlobal(true)), new LinkedList<ObjectInformation>()); //test basics lop = new ListObjectsParameters(user, Arrays.asList(wsi, writeable)) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withShowOnlyDeleted(true)), Arrays.asList(deleted)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi)) .withShowHidden(true).withShowDeleted(true).withShowOnlyDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), new ArrayList<ObjectInformation>()); compareObjectInfo(ws.listObjects(lop.withShowHidden(false).withShowOnlyDeleted(false)), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, deleted)); compareObjectInfo(ws.listObjects(lop.withShowHidden(true).withShowDeleted(false)), Arrays.asList(std, 
objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden)); compareObjectInfo(ws.listObjects(lop.withShowHidden(false)), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2)); compareObjectInfo(ws.listObjects(lop.withShowHidden(true).withShowDeleted(true) .withShowAllVersions(false)), Arrays.asList(std, objstack2, type2_4, stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withShowHidden(false).withShowDeleted(false) .withIncludeMetaData(false)), Arrays.asList(stdnometa, objstack2nometa, type2_4nometa, stdws2nometa)); compareObjectInfo(ws.listObjects(lop.withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true)), Arrays.asList(stdnometa, objstack1nometa, objstack2nometa, type2_1nometa, type2_2nometa, type2_3nometa, type2_4nometa, stdws2nometa, hiddennometa, deletednometa)); lop = new ListObjectsParameters(user, allType1) .withShowHidden(true).withShowDeleted(true).withShowAllVersions(true) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), setUpListObjectsExpected(Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); compareObjectInfo(ws.listObjects(lop.withSavers(new ArrayList<WorkspaceUser>())), setUpListObjectsExpected(Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); //exclude globally readable workspaces compareObjectInfo(ws.listObjects(lop.withExcludeGlobal(true)), Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj)); //if the globally readable workspace is explicitly listed, should ignore excludeGlobal lop = new ListObjectsParameters(user, Arrays.asList(wsi, writeable, thirdparty)) .withShowHidden(true).withShowDeleted(true).withShowAllVersions(true) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted, thirdobj)); compareObjectInfo(ws.listObjects(lop.withExcludeGlobal(true)), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted, thirdobj)); //test user filtering lop = new ListObjectsParameters(user, allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user, user2, user3))), Arrays.asList(std, objstack1, objstack2, stdws2, hidden, deleted, readobj, adminobj, thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user2, user3))), Arrays.asList(stdws2, deleted, readobj, adminobj, thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user, user3))), Arrays.asList(std, hidden, objstack1, objstack2, thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user3))), Arrays.asList(thirdobj)); compareObjectInfo(ws.listObjects(lop.withSavers(Arrays.asList(user))), Arrays.asList(std, hidden, objstack1, objstack2)); //meta filtering lop = new ListObjectsParameters(user, Arrays.asList(wsi, writeable)) .withMetadata(new WorkspaceUserMetadata()) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(std, objstack1, objstack2, type2_1, type2_2, type2_3, type2_4, stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta)), Arrays.asList(objstack1, type2_1, stdws2)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta2)), 
Arrays.asList(objstack2, type2_2, type2_3, hidden, deleted)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta3)), Arrays.asList(type2_3, type2_4, deleted)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta).withShowAllVersions(false)), Arrays.asList(stdws2)); compareObjectInfo(ws.listObjects(lop.withMetadata(meta2)), Arrays.asList(objstack2, hidden, deleted)); //type filtering compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(wsi), allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(std, objstack1, objstack2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(writeable), allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, allType2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_1, type2_2, type2_3, type2_4)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, Arrays.asList(writeable), allType2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), new ArrayList<ObjectInformation>()); //permission filtering lop = new ListObjectsParameters(user, SAFE_TYPE1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), setUpListObjectsExpected(Arrays.asList(std, stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); compareObjectInfo(ws.listObjects(lop.withIncludeMetaData(false)), setUpListObjectsExpected(Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, readobjnometa, adminobjnometa, thirdobjnometa), locknometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.NONE)), setUpListObjectsExpected(Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, readobjnometa, adminobjnometa, thirdobjnometa), locknometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.READ)), setUpListObjectsExpected(Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, readobjnometa, adminobjnometa, thirdobjnometa), locknometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.WRITE)), Arrays.asList(stdnometa, stdws2nometa, hiddennometa, deletednometa, adminobjnometa)); compareObjectInfo(ws.listObjects(lop.withMinimumPermission(Permission.ADMIN)), Arrays.asList(stdnometa, adminobjnometa)); //more type filtering compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE1_10) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(objstack1)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE1_20) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(objstack2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_1)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE2_10) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user, SAFE_TYPE2_20) 
.withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_3)); compareObjectInfo(ws.listObjects(new ListObjectsParameters( user, new TypeDefId(SAFE_TYPE2_20.getType(), SAFE_TYPE2_20.getMajorVersion())) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_3, type2_4)); compareObjectInfo(ws.listObjects(new ListObjectsParameters( user, new TypeDefId(SAFE_TYPE2_10.getType(), SAFE_TYPE2_10.getMajorVersion())) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(type2_2)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user2, allType1) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), setUpListObjectsExpected(Arrays.asList(stdws2, hidden, deleted, readobj, adminobj, thirdobj), lock)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user2, Arrays.asList(writeable)) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), Arrays.asList(stdws2, hidden, deleted)); compareObjectInfo(ws.listObjects(new ListObjectsParameters(user2, allType2) .withShowHidden(true).withShowDeleted(true) .withShowAllVersions(true).withIncludeMetaData(true)), new ArrayList<ObjectInformation>()); //TODO move these to unit tests for LOP // can't test 2 argument constructor with the 2nd constructor argument // null since then constructor is ambiguous try { new ListObjectsParameters(user, new LinkedList<WorkspaceIdentifier>()); fail("Created list objs param with bad init"); } catch (IllegalArgumentException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Must provide at least one workspace")); } try { new ListObjectsParameters(user, null, SAFE_TYPE1); fail("Created list objs param with bad init"); } catch (IllegalArgumentException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Must provide at least one workspace")); } try { new ListObjectsParameters(user, new LinkedList<WorkspaceIdentifier>(), SAFE_TYPE1); fail("Created list objs param with bad init"); } catch (IllegalArgumentException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Must provide at least one workspace")); } try { new ListObjectsParameters(user, Arrays.asList(wsi), null); fail("Created list objs param with bad init"); } catch (NullPointerException e) { assertThat("Correct exception msg", e.getLocalizedMessage(), is("Type cannot be null")); } failListObjects(user2, Arrays.asList(wsi, writeable), null, new WorkspaceAuthorizationException("User listObjUser2 may not read workspace listObj1")); failListObjects(null, Arrays.asList(wsi, writeable), null, new WorkspaceAuthorizationException("Anonymous users may not read workspace listObj1")); failListObjects(user, Arrays.asList(writeable, new WorkspaceIdentifier("listfake")), null, new NoSuchWorkspaceException("No workspace with name listfake exists", wsi)); failListObjects(user, Arrays.asList(wsi, writeable), meta32.getMetadata(), new IllegalArgumentException("Only one metadata spec allowed")); ws.createWorkspace(user, "listdel", false, null, null); ws.setWorkspaceDeleted(user, new WorkspaceIdentifier("listdel"), true); failListObjects(user, Arrays.asList(writeable, new WorkspaceIdentifier("listdel")), null, new NoSuchWorkspaceException("Workspace listdel is deleted", wsi)); assertThat("correct object history for std", ws.getObjectHistory(user, new 
ObjectIdentifier(wsi, "std")), is(Arrays.asList(std))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, "type2")), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, 3)), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, "type2", 3)), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for type2", ws.getObjectHistory(user, new ObjectIdentifier(wsi, 3, 4)), is(Arrays.asList(type2_1, type2_2, type2_3, type2_4))); assertThat("correct object history for objstack", ws.getObjectHistory(user, new ObjectIdentifier(wsi, "objstack")), is(Arrays.asList(objstack1, objstack2))); assertThat("correct object history for stdws2", ws.getObjectHistory(user2, new ObjectIdentifier(writeable, "stdws2")), is(Arrays.asList(stdws2))); failGetObjectHistory(user, new ObjectIdentifier(wsi, "booger"), new NoSuchObjectException("No object with name booger exists in workspace " + wsid1)); failGetObjectHistory(user, new ObjectIdentifier(new WorkspaceIdentifier("listObjectsfake"), "booger"), new InaccessibleObjectException("Object booger cannot be accessed: No workspace with name listObjectsfake exists")); failGetObjectHistory(user, new ObjectIdentifier(new WorkspaceIdentifier("listdel"), "booger"), new InaccessibleObjectException("Object booger cannot be accessed: Workspace listdel is deleted")); failGetObjectHistory(user2, new ObjectIdentifier(wsi, 3), new InaccessibleObjectException("Object 3 cannot be accessed: User listObjUser2 may not read workspace listObj1")); failGetObjectHistory(null, new ObjectIdentifier(wsi, 3), new InaccessibleObjectException("Object 3 cannot be accessed: Anonymous users may not read workspace listObj1")); failGetObjectHistory(user2, new ObjectIdentifier(writeable, "deleted"), new InaccessibleObjectException("Object 3 (name deleted) in workspace " + wsidwrite + " has been deleted")); ws.setGlobalPermission(user3, new WorkspaceIdentifier("thirdparty"), Permission.NONE); } @Test public void listObjectsByDate() throws Exception { WorkspaceUser u = new WorkspaceUser("listObjsByDate"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("listObjsByDateWS"); ws.createWorkspace(u, wsi.getName(), false, null, null); Map<String, String> data = new HashMap<String, String>(); Provenance p = new Provenance(u); ObjectInformation o1 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o1", p); Thread.sleep(100); ObjectInformation o2 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o2", p); Thread.sleep(100); ObjectInformation o3 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o3", p); Thread.sleep(100); ObjectInformation o4 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o4", p); Thread.sleep(100); ObjectInformation o5 = saveObject(u, wsi, null, data, SAFE_TYPE1, "o5", p); Date beforeall = new Date(o1.getSavedDate().getTime() - 1); Date afterall = new Date(o5.getSavedDate().getTime() + 1); ListObjectsParameters lop = new ListObjectsParameters(u, Arrays.asList(wsi)) .withIncludeMetaData(true); compareObjectInfo(ws.listObjects(lop), Arrays.asList(o1, o2, o3, o4, o5)); compareObjectInfo(ws.listObjects(lop.withAfter(beforeall).withBefore(afterall)), Arrays.asList(o1, o2, o3, o4, o5)); compareObjectInfo(ws.listObjects(lop.withAfter(afterall).withBefore(beforeall)), new ArrayList<ObjectInformation>()); 
compareObjectInfo(ws.listObjects(lop.withAfter(o3.getSavedDate()).withBefore(o4.getSavedDate())), new ArrayList<ObjectInformation>()); compareObjectInfo(ws.listObjects(lop.withAfter(o2.getSavedDate()).withBefore(null)), Arrays.asList(o3, o4, o5)); compareObjectInfo(ws.listObjects(lop.withAfter(null).withBefore(o4.getSavedDate())), Arrays.asList(o1, o2, o3)); compareObjectInfo(ws.listObjects(lop.withAfter(o2.getSavedDate()).withBefore(o4.getSavedDate())), Arrays.asList(o3)); compareObjectInfo(ws.listObjects(lop.withAfter(new Date(o2.getSavedDate().getTime() -1)) .withBefore(o5.getSavedDate())), Arrays.asList(o2, o3, o4)); } @Test public void getObjectSubdata() throws Exception { /* note most tests are performed at the same time as getObjects, so * only issues specific to subsets are tested here */ WorkspaceUser user = new WorkspaceUser("subUser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("subData"); WorkspaceUser user2 = new WorkspaceUser("subUser2"); long wsid1 = ws.createWorkspace(user, wsi.getName(), false, null, null).getId(); TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); Map<String, String> pmeta = new HashMap<String, String>(); pmeta.put("metastuff", "meta"); Map<String, String> pmeta2 = new HashMap<String, String>(); pmeta2.put("meta2", "my hovercraft is full of eels"); WorkspaceUserMetadata meta = new WorkspaceUserMetadata(pmeta); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(pmeta2); Provenance p1 = new Provenance(user); p1.addAction(new ProvenanceAction().withDescription("provenance 1") .withWorkspaceObjects(Arrays.asList("subData/auto1"))); Provenance p2 = new Provenance(user); p2.addAction(new ProvenanceAction().withDescription("provenance 2") .withWorkspaceObjects(Arrays.asList("subData/auto2"))); Map<String, Object> data1 = createData( "{\"map\": {\"id1\": {\"id\": 1," + " \"thing\": \"foo\"}," + " \"id2\": {\"id\": 2," + " \"thing\": \"foo2\"}," + " \"id3\": {\"id\": 3," + " \"thing\": \"foo3\"}" + " }," + " \"refs\": [\"subData/auto1\"]" + "}" ); Map<String, Object> data2 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " {\"id\": 2," + " \"thing\": \"foo2\"}," + " {\"id\": 3," + " \"thing\": \"foo3\"}" + " ]," + " \"refs\": [\"subData/auto2\"]" + "}" ); Map<String, Object> data3 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " {\"id\": 2," + " \"thing\": \"foo2\"}," + " null," + " {\"id\": 4," + " \"thing\": \"foo4\"}" + " ]," + " \"refs\": [\"subData/auto2\"]" + "}" ); ws.saveObjects(user, wsi, Arrays.asList( new WorkspaceSaveObject(data1, SAFE_TYPE1, meta, new Provenance(user), false), new WorkspaceSaveObject(data1, SAFE_TYPE1, meta, new Provenance(user), false)), getIdFactory()); ObjectInformation o1 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("o1"), data1, reftype, meta, p1, false)), getIdFactory()).get(0); ObjectInformation o2 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("o2"), data2, reftype, meta2, p2, false)), getIdFactory()).get(0); ObjectInformation o3 = ws.saveObjects(user, wsi, Arrays.asList(new WorkspaceSaveObject( new ObjectIDNoWSNoVer("o3"), data3, reftype, meta, p2, false)), getIdFactory()).get(0); ObjectIdentifier oident1 = new ObjectIdentifier(wsi, "o1"); ObjectIdentifier oident2 = new ObjectIdentifier(wsi, 4); ObjectIdentifier oident3 = ObjectIdentifier.parseObjectReference("subData/o3"); List<String> refs1 = Arrays.asList(wsid1 + "/1/1"); Map<String, String> refmap1 = new 
HashMap<String, String>(); refmap1.put("subData/auto1", wsid1 + "/1/1"); List<String> refs2 = Arrays.asList(wsid1 + "/2/1"); Map<String, String> refmap2 = new HashMap<String, String>(); refmap2.put("subData/auto2", wsid1 + "/2/1"); List<WorkspaceObjectData> got = ws.getObjectsSubSet(user, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/id3", "/map/id1"))), new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/id2"))), new SubObjectIdentifier(oident2, new ObjectPaths( Arrays.asList("/array/2", "/array/0"))), new SubObjectIdentifier(oident3, new ObjectPaths( Arrays.asList("/array/2", "/array/0", "/array/3"))))); Map<String, Object> expdata1 = createData( "{\"map\": {\"id1\": {\"id\": 1," + " \"thing\": \"foo\"}," + " \"id3\": {\"id\": 3," + " \"thing\": \"foo3\"}" + " }" + "}" ); Map<String, Object> expdata2 = createData( "{\"map\": {\"id2\": {\"id\": 2," + " \"thing\": \"foo2\"}" + " }" + "}" ); Map<String, Object> expdata3 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " {\"id\": 3," + " \"thing\": \"foo3\"}" + " ]" + "}" ); Map<String, Object> expdata4 = createData( "{\"array\": [{\"id\": 1," + " \"thing\": \"foo\"}," + " null," + " {\"id\": 4," + " \"thing\": \"foo4\"}" + " ]" + "}" ); compareObjectAndInfo(got.get(0), o1, p1, expdata1, refs1, refmap1); compareObjectAndInfo(got.get(1), o1, p1, expdata2, refs1, refmap1); compareObjectAndInfo(got.get(2), o2, p2, expdata3, refs2, refmap2); compareObjectAndInfo(got.get(3), o3, p2, expdata4, refs2, refmap2); // new test for extractor that fails on an array OOB failGetSubset(user, Arrays.asList( new SubObjectIdentifier(oident2, new ObjectPaths( Arrays.asList("/array/3", "/array/0")))), new TypedObjectExtractionException( "Invalid selection: no array element exists at position '3', at: /array/3")); got = ws.getObjectsSubSet(user, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/*/thing"))), new SubObjectIdentifier(oident2, new ObjectPaths( Arrays.asList("/array/[*]/thing"))))); expdata1 = createData( "{\"map\": {\"id1\": {\"thing\": \"foo\"}," + " \"id2\": {\"thing\": \"foo2\"}," + " \"id3\": {\"thing\": \"foo3\"}" + " }" + "}" ); expdata2 = createData( "{\"array\": [{\"thing\": \"foo\"}," + " {\"thing\": \"foo2\"}," + " {\"thing\": \"foo3\"}" + " ]" + "}" ); compareObjectAndInfo(got.get(0), o1, p1, expdata1, refs1, refmap1); compareObjectAndInfo(got.get(1), o2, p2, expdata2, refs2, refmap2); failGetSubset(user, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/id1/id/5")))), new TypedObjectExtractionException( "Invalid selection: the path given specifies fields or elements that do not exist " + "because data at this location is a scalar value (i.e. 
string, integer, float), at: /map/id1/id")); failGetSubset(user2, Arrays.asList( new SubObjectIdentifier(oident1, new ObjectPaths( Arrays.asList("/map/*/thing")))), new InaccessibleObjectException( "Object o1 cannot be accessed: User subUser2 may not read workspace subData")); try { ws.getObjectsSubSet(user2, Arrays.asList(new SubObjectIdentifier( new ObjectIdentifier(wsi, 2), null))); fail("Able to get obj data from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 2 cannot be accessed: User subUser2 may not read workspace subData")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(wsi, 2))); } } @Test public void getReferencingObjects() throws Exception { WorkspaceUser user1 = new WorkspaceUser("refUser"); WorkspaceUser user2 = new WorkspaceUser("refUser2"); WorkspaceIdentifier wsitar1 = new WorkspaceIdentifier("refstarget1"); WorkspaceIdentifier wsitar2 = new WorkspaceIdentifier("refstarget2"); WorkspaceIdentifier wsisrc1 = new WorkspaceIdentifier("refssource1"); WorkspaceIdentifier wsisrc2 = new WorkspaceIdentifier("refssource2"); WorkspaceIdentifier wsisrc2noaccess = new WorkspaceIdentifier("refssource2noaccess"); WorkspaceIdentifier wsisrcdel1 = new WorkspaceIdentifier("refssourcedel1"); WorkspaceIdentifier wsisrc2gl = new WorkspaceIdentifier("refssourcegl"); long wsid = ws.createWorkspace(user1, wsitar1.getName(), false, null, null).getId(); ws.setPermissions(user1, wsitar1, Arrays.asList(user2), Permission.READ); ws.createWorkspace(user2, wsitar2.getName(), false, null, null); ws.setPermissions(user2, wsitar2, Arrays.asList(user1), Permission.READ); ws.createWorkspace(user1, wsisrc1.getName(), false, null, null); ws.createWorkspace(user2, wsisrc2.getName(), false, null, null); ws.setPermissions(user2, wsisrc2, Arrays.asList(user1), Permission.READ); ws.createWorkspace(user2, wsisrc2noaccess.getName(), false, null, null); ws.createWorkspace(user1, wsisrcdel1.getName(), false, null, null); ws.createWorkspace(user2, wsisrc2gl.getName(), true, null, null); TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); Map<String, String> pmeta1 = new HashMap<String, String>(); pmeta1.put("metastuff", "meta"); Map<String, String> pmeta2 = new HashMap<String, String>(); pmeta2.put("meta2", "my hovercraft is full of eels"); WorkspaceUserMetadata meta1 = new WorkspaceUserMetadata(pmeta1); WorkspaceUserMetadata meta2 = new WorkspaceUserMetadata(pmeta2); Map<String, Object> mtdata = new HashMap<String, Object>(); Provenance p1 = new Provenance(user1); //test objects with no references or no accessible references ws.saveObjects(user1, wsitar1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("norefs"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("deletedref"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadableref"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("deletedprovref"), mtdata, SAFE_TYPE1, null, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadableprovref"), mtdata, SAFE_TYPE1, null, p1, false)), getIdFactory()); Map<String, Object> refdata = new HashMap<String, Object>(); refdata.put("refs", Arrays.asList("refstarget1/deletedref")); ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("delrefptr"), refdata, reftype, null, p1, false)), 
getIdFactory()); ws.setObjectsDeleted(user1, Arrays.asList( new ObjectIdentifier(wsisrc1, "delrefptr")), true); refdata.put("refs", Arrays.asList("refstarget1/unreadableref")); ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadrefptr"), refdata, reftype, null, p1, false)), getIdFactory()); ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("deletedprovrefptr"), mtdata, SAFE_TYPE1, null, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/deletedprovref"))), false)), getIdFactory()); ws.setObjectsDeleted(user1, Arrays.asList( new ObjectIdentifier(wsisrc1, "deletedprovrefptr")), true); ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadableprovrefptr"), mtdata, SAFE_TYPE1, null, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/unreadableprovref"))), false)), getIdFactory()); List<Set<ObjectInformation>> mtrefs = new ArrayList<Set<ObjectInformation>>(); mtrefs.add(new HashSet<ObjectInformation>()); for (String name: Arrays.asList("norefs", "deletedref", "unreadableref", "deletedprovref", "unreadableprovref")) { assertThat("ref lists empty", ws.getReferencingObjects(user1, Arrays.asList(new ObjectIdentifier(wsitar1, name))), is(mtrefs)); } ws.saveObjects(user1, wsitar1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk"), mtdata, SAFE_TYPE1, meta1, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk"), mtdata, SAFE_TYPE1, meta2, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("single"), mtdata, SAFE_TYPE1, meta1, p1, false)), getIdFactory()); ws.saveObjects(user2, wsitar2, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk2"), mtdata, SAFE_TYPE1, meta1, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stk2"), mtdata, SAFE_TYPE1, meta2, p1, false), new WorkspaceSaveObject(new ObjectIDNoWSNoVer("single2"), mtdata, SAFE_TYPE1, meta1, p1, false)), getIdFactory()); refdata.put("refs", Arrays.asList("refstarget1/stk/1")); ObjectInformation stdref1 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stdref"), refdata, reftype, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/1"))), false)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget1/stk/2")); ObjectInformation stdref2 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("stdref"), refdata, reftype, meta2, new Provenance(user1), false)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget1/stk")); ObjectInformation hiddenref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("hiddenref"), refdata, reftype, meta1, new Provenance(user1), true)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget2/stk2")); @SuppressWarnings("unused") ObjectInformation delref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("delref"), refdata, reftype, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/2"))), true)), getIdFactory()).get(0); ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(wsisrc1, "delref")), true); refdata.put("refs", Arrays.asList("refstarget1/single")); 
ObjectInformation readable = ws.saveObjects(user2, wsisrc2, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("readable"), refdata, reftype, meta2, new Provenance(user2), true)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget2/stk2/2")); @SuppressWarnings("unused") ObjectInformation unreadable = ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("unreadable"), refdata, reftype, meta1, new Provenance(user2), true)), getIdFactory()).get(0); refdata.put("refs", Arrays.asList("refstarget2/single2/1")); @SuppressWarnings("unused") ObjectInformation wsdeletedreadable1 = ws.saveObjects(user1, wsisrcdel1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("wsdeletedreadable1"), refdata, reftype, meta2, new Provenance(user1), false)), getIdFactory()).get(0); ws.setWorkspaceDeleted(user1, wsisrcdel1, true); refdata.put("refs", Arrays.asList("refstarget2/stk2/1")); ObjectInformation globalrd = ws.saveObjects(user2, wsisrc2gl, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("globalrd"), refdata, reftype, meta1, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/single/1"))), false)), getIdFactory()).get(0); List<ObjectIdentifier> objs = Arrays.asList( new ObjectIdentifier(wsitar1, "stk"), new ObjectIdentifier(wsitar1, "stk", 2), new ObjectIdentifier(wsitar1, "stk", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( oiset(stdref2, hiddenref), oiset(stdref2, hiddenref), oiset(stdref1)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(3, 3, 1))); Set<ObjectInformation> mtoiset = new HashSet<ObjectInformation>(); objs = Arrays.asList( new ObjectIdentifier(wsitar2, "stk2"), new ObjectIdentifier(wsitar2, "stk2", 2), new ObjectIdentifier(wsitar2, "stk2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( mtoiset, mtoiset, oiset(globalrd)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(2, 2, 1))); objs = Arrays.asList( new ObjectIdentifier(wsitar1, "single"), new ObjectIdentifier(wsitar1, "single", 1), new ObjectIdentifier(wsitar2, "single2"), new ObjectIdentifier(wsitar2, "single2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1,objs), is(Arrays.asList( oiset(readable, globalrd), oiset(readable, globalrd), mtoiset, mtoiset))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(2, 2, 1, 1))); ObjectInformation pstdref1 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pstdref"), mtdata, SAFE_TYPE1, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/1"))), false)), getIdFactory()).get(0); ObjectInformation pstdref2 = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pstdref"), mtdata, SAFE_TYPE1, meta2, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk/2"))), false)), getIdFactory()).get(0); ObjectInformation phiddenref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("phiddenref"), mtdata, SAFE_TYPE1, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/stk"))), true)), getIdFactory()).get(0); 
@SuppressWarnings("unused") ObjectInformation pdelref = ws.saveObjects(user1, wsisrc1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pdelref"), mtdata, SAFE_TYPE1, meta1, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/stk2"))), true)), getIdFactory()).get(0); ws.setObjectsDeleted(user1, Arrays.asList(new ObjectIdentifier(wsisrc1, "pdelref")), true); ObjectInformation preadable = ws.saveObjects(user2, wsisrc2, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("preadable"), mtdata, SAFE_TYPE1, meta2, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget1/single"))), true)), getIdFactory()).get(0); @SuppressWarnings("unused") ObjectInformation punreadable = ws.saveObjects(user2, wsisrc2noaccess, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("punreadable"), mtdata, SAFE_TYPE1, meta1, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/stk2/2"))), true)), getIdFactory()).get(0); ws.setWorkspaceDeleted(user1, wsisrcdel1, false); @SuppressWarnings("unused") ObjectInformation pwsdeletedreadable1 = ws.saveObjects(user1, wsisrcdel1, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pwsdeletedreadable1"), mtdata, SAFE_TYPE1, meta2, new Provenance(user1).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/single2/1"))), false)), getIdFactory()).get(0); ws.setWorkspaceDeleted(user1, wsisrcdel1, true); ObjectInformation pglobalrd = ws.saveObjects(user2, wsisrc2gl, Arrays.asList( new WorkspaceSaveObject(new ObjectIDNoWSNoVer("pglobalrd"), mtdata, SAFE_TYPE1, meta1, new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList("refstarget2/stk2/1"))), false)), getIdFactory()).get(0); objs = Arrays.asList( new ObjectIdentifier(wsitar1, "stk"), new ObjectIdentifier(wsitar1, "stk", 2), new ObjectIdentifier(wsitar1, "stk", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( oiset(stdref2, hiddenref, pstdref2, phiddenref), oiset(stdref2, hiddenref, pstdref2, phiddenref), oiset(stdref1, pstdref1)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(5, 5, 2))); objs = Arrays.asList( new ObjectIdentifier(wsitar2, "stk2"), new ObjectIdentifier(wsitar2, "stk2", 2), new ObjectIdentifier(wsitar2, "stk2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( mtoiset, mtoiset, oiset(globalrd, pglobalrd)))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(4, 4, 2))); objs = Arrays.asList( new ObjectIdentifier(wsitar1, "single"), new ObjectIdentifier(wsitar1, "single", 1), new ObjectIdentifier(wsitar2, "single2"), new ObjectIdentifier(wsitar2, "single2", 1)); assertThat("got correct refs", ws.getReferencingObjects(user1, objs), is(Arrays.asList( oiset(readable, globalrd, preadable), oiset(readable, globalrd, preadable), mtoiset, mtoiset))); assertThat("got correct refcounts", ws.getReferencingObjectCounts(user1, objs), is(Arrays.asList(3, 3, 2, 2))); try { ws.getReferencingObjects(user2, Arrays.asList( new ObjectIdentifier(wsisrc1, 1))); fail("Able to get ref obj data from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 1 cannot be accessed: User refUser2 may not read workspace 
refssource1")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(wsisrc1, 1))); } try { ws.getReferencingObjectCounts(user2, Arrays.asList( new ObjectIdentifier(wsisrc1, 1))); fail("Able to get ref obj count from private workspace"); } catch (InaccessibleObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("Object 1 cannot be accessed: User refUser2 may not read workspace refssource1")); assertThat("correct object returned", ioe.getInaccessibleObject(), is(new ObjectIdentifier(wsisrc1, 1))); } try { ws.getReferencingObjectCounts(user1, Arrays.asList( new ObjectIdentifier(wsitar1, "single", 2))); fail("Able to get ref obj count for non-existant obj version"); } catch (NoSuchObjectException ioe) { assertThat("correct exception message", ioe.getLocalizedMessage(), is("No object with id 7 (name single) and version 2 exists in workspace " + wsid)); ObjectIDResolvedWS resobj = ioe.getResolvedInaccessibleObject(); assertThat("correct ws id in returned oid", resobj.getWorkspaceIdentifier().getID(), is(wsid)); assertThat("correct ws name in returned oid", resobj.getWorkspaceIdentifier().getName(), is(wsitar1.getName())); assertThat("correct objid in returned oid", resobj.getId(), is((Long) null)); assertThat("correct obj name in returned oid", resobj.getName(), is("single")); assertThat("correct obj ver in returned oid", resobj.getVersion(), is(2)); } ws.setGlobalPermission(user2, wsisrc2gl, Permission.NONE); } @Test public void getReferencedObjects() throws Exception { WorkspaceUser user1 = new WorkspaceUser("refedUser"); WorkspaceUser user2 = new WorkspaceUser("refedUser2"); WorkspaceIdentifier wsiacc1 = new WorkspaceIdentifier("refedaccessible"); WorkspaceIdentifier wsiacc2 = new WorkspaceIdentifier("refedaccessible2"); WorkspaceIdentifier wsiun1 = new WorkspaceIdentifier("refedunacc"); WorkspaceIdentifier wsiun2 = new WorkspaceIdentifier("refedunacc2"); WorkspaceIdentifier wsidel = new WorkspaceIdentifier("refeddel"); ws.createWorkspace(user1, wsiacc1.getName(), false, null, null); ws.setPermissions(user1, wsiacc1, Arrays.asList(user2), Permission.WRITE); ws.createWorkspace(user2, wsiacc2.getName(), true, null, null); long wsidun1 = ws.createWorkspace(user2, wsiun1.getName(), false, null, null).getId(); long wsidun2 = ws.createWorkspace(user2, wsiun2.getName(), false, null, null).getId(); ws.createWorkspace(user2, wsidel.getName(), false, null, null); TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); Map<String, String> meta1 = new HashMap<String, String>(); meta1.put("some", "very special metadata"); Map<String, String> meta2 = new HashMap<String, String>(); meta2.put("some", "very special metadata2"); Map<String, String> mtdata = new HashMap<String, String>(); Map<String, Object> data1 = createData( "{\"thing1\": \"whoop whoop\"," + " \"thing2\": \"aroooga\"}"); Map<String, Object> data2 = createData( "{\"thing3\": \"whoop whoop\"," + " \"thing4\": \"aroooga\"}"); ObjectInformation leaf1 = saveObject(user2, wsiun1, meta1, data1, SAFE_TYPE1, "leaf1", new Provenance(user2)); ObjectIdentifier leaf1oi = new ObjectIdentifier(wsiun1, "leaf1"); failGetObjects(user1, Arrays.asList(leaf1oi), new InaccessibleObjectException( "Object leaf1 cannot be accessed: User refedUser may not read workspace refedunacc")); ObjectInformation leaf2 = saveObject(user2, wsiun2, meta2, data2, SAFE_TYPE1, "leaf2", new Provenance(user2)); ObjectIdentifier leaf2oi = new ObjectIdentifier(wsiun2, "leaf2"); 
failGetObjects(user1, Arrays.asList(leaf2oi), new InaccessibleObjectException( "Object leaf2 cannot be accessed: User refedUser may not read workspace refedunacc2")); saveObject(user2, wsiun2, meta2, data2, SAFE_TYPE1, "unlinked", new Provenance(user2)); ObjectIdentifier unlinkedoi = new ObjectIdentifier(wsiun2, "unlinked"); failGetObjects(user1, Arrays.asList(unlinkedoi), new InaccessibleObjectException( "Object unlinked cannot be accessed: User refedUser may not read workspace refedunacc2")); final String leaf1r = "refedunacc/leaf1"; saveObject(user2, wsiacc1, MT_META, makeRefData(leaf1r),reftype, "simpleref", new Provenance(user2)); final String leaf2r = "refedunacc2/leaf2"; saveObject(user2, wsiacc2, MT_META, makeRefData(leaf2r),reftype, "simpleref2", new Provenance(user2)); saveObject(user2, wsiacc1, MT_META, mtdata, SAFE_TYPE1, "provref", new Provenance(user2) .addAction(new ProvenanceAction().withWorkspaceObjects( Arrays.asList(leaf1r)))); saveObject(user2, wsiacc2, MT_META, mtdata, SAFE_TYPE1, "provref2", new Provenance(user2) .addAction(new ProvenanceAction().withWorkspaceObjects( Arrays.asList(leaf2r)))); final HashMap<String, String> mtmap = new HashMap<String, String>(); final LinkedList<String> mtlist = new LinkedList<String>(); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc1, "simpleref"), Arrays.asList(leaf1oi)), leaf1, new Provenance(user2), data1, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc2, "simpleref2"), Arrays.asList(leaf2oi)), leaf2, new Provenance(user2), data2, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc1, "provref"), Arrays.asList(leaf1oi)), leaf1, new Provenance(user2), data1, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(new ObjectIdentifier(wsiacc2, "provref2"), Arrays.asList(leaf2oi)), leaf2, new Provenance(user2), data2, mtlist, mtmap); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiacc2, "simpleref2"), Arrays.asList(leaf1oi))), new NoSuchReferenceException( "The object simpleref2 in workspace refedaccessible2 does not contain the reference " + wsidun1 + "/1/1", null, null)); ObjectInformation del1 = saveObject(user2, wsiun1, meta2, makeRefData(leaf1r, leaf2r), reftype, "del1", new Provenance(user2)); ObjectIdentifier del1oi = new ObjectIdentifier(wsiun1, "del1"); final Provenance p = new Provenance(user2).addAction(new ProvenanceAction() .withWorkspaceObjects(Arrays.asList(leaf1r, leaf2r))); ObjectInformation del2 = saveObject(user2, wsiun2, meta1, makeRefData(), reftype, "del2", p); ObjectIdentifier del2oi = new ObjectIdentifier(wsiun2, "del2"); saveObject(user2, wsidel, meta1, makeRefData(leaf2r), reftype, "delws", new Provenance(user2)); ObjectIdentifier delwsoi = new ObjectIdentifier(wsidel, "delws"); saveObject(user2, wsiacc1, MT_META, makeRefData("refedunacc/del1", "refedunacc2/del2"), reftype, "delptr12", new Provenance(user2)); ObjectIdentifier delptr12oi = new ObjectIdentifier(wsiacc1, "delptr12"); saveObject(user2, wsiacc2, MT_META, makeRefData("refedunacc2/del2"), reftype, "delptr2", new Provenance(user2)); ObjectIdentifier delptr2oi = new ObjectIdentifier(wsiacc2, "delptr2"); saveObject(user2, wsiacc2, MT_META, makeRefData("refeddel/delws"), reftype, "delptrws", new Provenance(user2)); ObjectIdentifier delptrwsoi = new ObjectIdentifier(wsiacc2, "delptrws"); ws.setObjectsDeleted(user2, Arrays.asList(del1oi, del2oi), true); ws.setWorkspaceDeleted(user2, wsidel, true); 
List<WorkspaceObjectData> lwod = ws.getReferencedObjects(user1, Arrays.asList( new ObjectChain(delptr12oi, Arrays.asList(del1oi, leaf1oi)), new ObjectChain(delptr12oi, Arrays.asList(del1oi, leaf2oi)), new ObjectChain(delptr12oi, Arrays.asList(del2oi, leaf1oi)), new ObjectChain(delptrwsoi, Arrays.asList(delwsoi, leaf2oi)), new ObjectChain(delptr12oi, Arrays.asList(del2oi, leaf2oi)), new ObjectChain(delptr2oi, Arrays.asList(del2oi, leaf1oi)), new ObjectChain(delptr2oi, Arrays.asList(del2oi, leaf2oi)) )); assertThat("correct list size", lwod.size(), is(7)); compareObjectAndInfo(lwod.get(0), leaf1, new Provenance(user2), data1, mtlist, mtmap); compareObjectAndInfo(lwod.get(1), leaf2, new Provenance(user2), data2, mtlist, mtmap); compareObjectAndInfo(lwod.get(2), leaf1, new Provenance(user2), data1, mtlist, mtmap); compareObjectAndInfo(lwod.get(3), leaf2, new Provenance(user2), data2, mtlist, mtmap); compareObjectAndInfo(lwod.get(4), leaf2, new Provenance(user2), data2, mtlist, mtmap); compareObjectAndInfo(lwod.get(5), leaf1, new Provenance(user2), data1, mtlist, mtmap); compareObjectAndInfo(lwod.get(6), leaf2, new Provenance(user2), data2, mtlist, mtmap); checkReferencedObject(user1, new ObjectChain(delptr12oi, Arrays.asList(del1oi)), del1, new Provenance(user2), makeRefData(wsidun1 + "/1/1", wsidun2 + "/1/1"), Arrays.asList(wsidun1 + "/1/1", wsidun2 + "/1/1"), mtmap); Map<String, String> provmap = new HashMap<String, String>(); provmap.put(leaf1r, wsidun1 + "/1/1"); provmap.put(leaf2r, wsidun2 + "/1/1"); checkReferencedObject(user1, new ObjectChain(delptr12oi, Arrays.asList(del2oi)), del2, p, makeRefData(), mtlist, provmap); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr2oi, Arrays.asList(del1oi, leaf1oi))), new NoSuchReferenceException( "The object delptr2 in workspace refedaccessible2 does not contain the reference " + wsidun1 + "/2/1", null, null)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr12oi, Arrays.asList(del1oi, unlinkedoi))), new NoSuchReferenceException( "The object del1 in workspace refedunacc does not contain the reference " + wsidun2 + "/2/1", null, null)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr12oi, Arrays.asList(del1oi, new ObjectIdentifier(wsiun1, "leaf2")))), new NoSuchObjectException( "No object with name leaf2 exists in workspace " + wsidun1, null, null)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(delptr12oi, Arrays.asList(del1oi, new ObjectIdentifier(wsiun1, "leaf1", 2)))), new NoSuchObjectException( "No object with id 1 (name leaf1) and version 2 exists in workspace " + wsidun1, null, null)); failGetReferencedObjects(user2, new ArrayList<ObjectChain>(), new IllegalArgumentException("No object identifiers provided")); failGetReferencedObjects(user2, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf3"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("No object with name leaf3 exists in workspace " + wsidun1)); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(new ObjectIdentifier(new WorkspaceIdentifier("fakefakefake"), "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be accessed: No workspace with name fakefakefake exists")); failGetReferencedObjects(user1, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be 
accessed: User refedUser may not read workspace refedunacc")); failGetReferencedObjects(null, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be accessed: Anonymous users may not read workspace refedunacc")); ws.setObjectsDeleted(user2, Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")), true); failGetReferencedObjects(user2, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object 1 (name leaf1) in workspace " + wsidun1 + " has been deleted")); ws.setObjectsDeleted(user2, Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")), false); ws.setWorkspaceDeleted(user2, wsiun1, true); failGetReferencedObjects(user2, Arrays.asList(new ObjectChain(new ObjectIdentifier(wsiun1, "leaf1"), Arrays.asList(new ObjectIdentifier(wsiun1, "leaf1")))), new InaccessibleObjectException("Object leaf1 cannot be accessed: Workspace refedunacc is deleted")); ws.setGlobalPermission(user2, wsiacc2, Permission.NONE); } @Test public void objectChain() throws Exception { WorkspaceIdentifier wsi = new WorkspaceIdentifier("foo"); ObjectIdentifier oi = new ObjectIdentifier(wsi, "thing"); failCreateObjectChain(null, new ArrayList<ObjectIdentifier>(), new IllegalArgumentException("Neither head nor chain can be null")); failCreateObjectChain(oi, null, new IllegalArgumentException("Neither head nor chain can be null")); failCreateObjectChain(oi, new ArrayList<ObjectIdentifier>(), new IllegalArgumentException("Chain cannot be empty")); failCreateObjectChain(oi, Arrays.asList(oi, null, oi), new IllegalArgumentException("Nulls are not allowed in reference chains")); } @Test public void grantRemoveOwnership() throws Exception { WorkspaceUser user = new WorkspaceUser("foo"); String moduleName = "SharedModule"; ws.requestModuleRegistration(user, moduleName); ws.resolveModuleRegistration(moduleName, true); ws.compileNewTypeSpec(user, "module " + moduleName + " {typedef int MainType;};", Arrays.asList("MainType"), null, null, false, null); ws.releaseTypes(user, moduleName); WorkspaceUser user2 = new WorkspaceUser("bar"); try { ws.compileNewTypeSpec(user2, "module " + moduleName + " {typedef string MainType;};", Collections.<String>emptyList(), null, null, false, null); Assert.fail(); } catch (NoSuchPrivilegeException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("not in list of owners")); } ws.grantModuleOwnership(moduleName, user2.getUser(), false, user, false); ws.compileNewTypeSpec(user2, "module " + moduleName + " {typedef string MainType;};", Collections.<String>emptyList(), null, null, false, null); WorkspaceUser user3 = new WorkspaceUser("baz"); try { ws.grantModuleOwnership(moduleName, user3.getUser(), false, user2, false); Assert.fail(); } catch (NoSuchPrivilegeException ex) { Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("can not change privileges")); } ws.grantModuleOwnership(moduleName, user2.getUser(), true, user, false); ws.grantModuleOwnership(moduleName, user3.getUser(), false, user2, false); ws.removeModuleOwnership(moduleName, user3.getUser(), user2, false); ws.removeModuleOwnership(moduleName, user2.getUser(), user, false); try { ws.compileNewTypeSpec(user2, "module " + moduleName + " {typedef float MainType;};", Collections.<String>emptyList(), null, null, false, null); Assert.fail(); } catch (NoSuchPrivilegeException ex) { 
Assert.assertTrue(ex.getMessage(), ex.getMessage().contains("not in list of owners")); } } @Test public void removeTypeTest() throws Exception { WorkspaceUser user = new WorkspaceUser("foo"); String moduleName = "MyMod3"; ws.requestModuleRegistration(user, moduleName); ws.resolveModuleRegistration(moduleName, true); ws.compileNewTypeSpec(user, "module " + moduleName + " {" + "typedef structure {string foo; list<int> bar; int baz;} AType; " + "typedef structure {string whooo;} BType;};", Arrays.asList("AType", "BType"), null, null, false, null); ws.compileTypeSpec(user, moduleName, Collections.<String>emptyList(), Arrays.asList("BType"), Collections.<String, Long>emptyMap(), false); List<Long> vers = ws.getModuleVersions(moduleName, user); Collections.sort(vers); Assert.assertEquals(2, vers.size()); Assert.assertEquals(2, ws.getModuleInfo(user, new ModuleDefId(moduleName, vers.get(0))).getTypes().size()); Assert.assertEquals(1, ws.getModuleInfo(user, new ModuleDefId(moduleName, vers.get(1))).getTypes().size()); Assert.assertEquals(Arrays.asList(vers.get(0)), ws.getModuleVersions(new TypeDefId(moduleName + ".BType", "0.1"), user)); ws.releaseTypes(user, moduleName); Assert.assertEquals(1, ws.getModuleVersions(new TypeDefId(moduleName + ".AType"), null).size()); Assert.assertEquals(moduleName + ".AType-1.0", ws.getTypeInfo(moduleName + ".AType", false, null).getTypeDefId()); } @Test public void admin() throws Exception { assertThat("no admins before adding any", ws.getAdmins(), is((Set<WorkspaceUser>) new HashSet<WorkspaceUser>())); ws.addAdmin(new WorkspaceUser("adminguy")); Set<WorkspaceUser> expected = new HashSet<WorkspaceUser>(); expected.add(new WorkspaceUser("adminguy")); assertThat("correct admins", ws.getAdmins(), is(expected)); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy"))); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy2"))); ws.addAdmin(new WorkspaceUser("adminguy2")); expected.add(new WorkspaceUser("adminguy2")); assertThat("correct admins", ws.getAdmins(), is(expected)); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy"))); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy2"))); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy3"))); ws.removeAdmin(new WorkspaceUser("adminguy")); expected.remove(new WorkspaceUser("adminguy")); assertThat("correct admins", ws.getAdmins(), is(expected)); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy"))); assertTrue("correctly detected as admin", ws.isAdmin(new WorkspaceUser("adminguy2"))); assertFalse("correctly detected as not an admin", ws.isAdmin(new WorkspaceUser("adminguy3"))); } @Test public void getAllWorkspaceOwners() throws Exception { Set<WorkspaceUser> startusers = ws.getAllWorkspaceOwners(); String userprefix = "getAllWorkspaceOwners"; Set<WorkspaceUser> users = new HashSet<WorkspaceUser>(); for (int i = 0; i < 4; i++) { String u = userprefix + i; users.add(new WorkspaceUser(u)); ws.createWorkspace(new WorkspaceUser(u), u + ":" + userprefix, false, null, null); } Set<WorkspaceUser> newusers = ws.getAllWorkspaceOwners(); newusers.removeAll(startusers); assertThat("got correct list of workspace users", newusers, is(users)); } @Test public void sortForMD5() throws Exception { WorkspaceUser user = new WorkspaceUser("md5user"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("sorting"); ws.createWorkspace(user, 
wsi.getIdentifierString(), false, null, null); Map<String, Object> data = new LinkedHashMap<String, Object>(); data.put("g", 7); data.put("d", 4); data.put("a", 1); data.put("e", 5); data.put("b", 2); data.put("f", 6); data.put("c", 3); String expected = "{\"a\":1,\"b\":2,\"c\":3,\"d\":4,\"e\":5,\"f\":6,\"g\":7}"; String md5 = DigestUtils.md5Hex(expected); assertThat("md5 correct", md5, is("f906e268b16cbfa1c302c6bb51a6b784")); JsonNode savedata = MAPPER.valueToTree(data); Provenance p = new Provenance(new WorkspaceUser("kbasetest2")); List<WorkspaceSaveObject> objects = Arrays.asList( new WorkspaceSaveObject(savedata, SAFE_TYPE1, null, p, false)); List<ObjectInformation> objinfo = ws.saveObjects(user, wsi, objects, getIdFactory()); assertThat("workspace calculated md5 correct", objinfo.get(0).getCheckSum(), is(md5)); objinfo = ws.getObjectInformation(user, Arrays.asList(new ObjectIdentifier(wsi, 1)), false, false); assertThat("workspace calculated md5 correct", objinfo.get(0).getCheckSum(), is(md5)); } @Test public void maxObjectSize() throws Exception { WorkspaceUser user = new WorkspaceUser("MOSuser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("maxObjectSize"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); data.put("foo", "9012345678"); ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder(oldcfg); ws.setResourceConfig(build.withMaxObjectSize(20).build()); saveObject(user, wsi, null, data, SAFE_TYPE1, "foo", new Provenance(user)); //should work data.put("foo", "90123456789"); failSave(user, wsi, Arrays.asList( new WorkspaceSaveObject(data, SAFE_TYPE1, null, new Provenance(user), false)), new IllegalArgumentException( "Object #1 data size 21 exceeds limit of 20")); ws.setResourceConfig(oldcfg); } @Test public void maxReturnedObjectSize() throws Exception { TypeDefId reftype = new TypeDefId(new TypeDefName("CopyRev", "RefType"), 1, 0); WorkspaceUser user = new WorkspaceUser("MROSuser"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("maxReturnedObjectSize"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data = new HashMap<String, Object>(); data.put("fo", "90"); data.put("ba", "3"); saveObject(user, wsi, null, data, SAFE_TYPE1, "foo", new Provenance(user)); ObjectIdentifier oi1 = new ObjectIdentifier(wsi, "foo", 1); saveObject(user, wsi, null, data, SAFE_TYPE1, "foo2", new Provenance(user)); ObjectIdentifier oi2 = new ObjectIdentifier(wsi, "foo2", 1); List<ObjectIdentifier> oi1l = Arrays.asList(oi1); List<ObjectIdentifier> oi2l = Arrays.asList(oi2); Map<String, Object> refdata = new HashMap<String, Object>(); refdata.put("refs", Arrays.asList(wsi.getName() + "/foo/1")); saveObject(user, wsi, null, refdata, reftype, "ref", new Provenance(user)); refdata.put("refs", Arrays.asList(wsi.getName() + "/foo2/1")); saveObject(user, wsi, null, refdata, reftype, "ref2", new Provenance(user)); ObjectIdentifier ref = new ObjectIdentifier(wsi, "ref", 1); ObjectIdentifier ref2 = new ObjectIdentifier(wsi, "ref2", 1); List<ObjectChain> refchain = Arrays.asList(new ObjectChain(ref, oi1l)); List<ObjectChain> refchain2 = Arrays.asList(new ObjectChain(ref, oi1l), new ObjectChain(ref2, oi2l)); ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder( oldcfg).withMaxObjectSize(1); 
ws.setResourceConfig(build.withMaxReturnedDataSize(20).build()); List<SubObjectIdentifier> ois1l = Arrays.asList(new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/fo")))); List<SubObjectIdentifier> ois1lmt = Arrays.asList(new SubObjectIdentifier(oi1, new ObjectPaths(new ArrayList<String>()))); successGetObjects(user, oi1l); ws.getObjectsSubSet(user, ois1l); ws.getObjectsSubSet(user, ois1lmt); ws.getReferencedObjects(user, refchain); ws.setResourceConfig(build.withMaxReturnedDataSize(19).build()); String errstr = "Too much data requested from the workspace at once; data requested " + "including potential subsets is %sB which exceeds maximum of %s."; IllegalArgumentException err = new IllegalArgumentException(String.format(errstr, 20, 19)); failGetObjects(user, oi1l, err, true); failGetSubset(user, ois1l, err); failGetSubset(user, ois1lmt, err); failGetReferencedObjects(user, refchain, err); ws.setResourceConfig(build.withMaxReturnedDataSize(40).build()); List<ObjectIdentifier> two = Arrays.asList(oi1, oi2); List<SubObjectIdentifier> ois1l2 = Arrays.asList( new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/fo"))), new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/ba")))); List<SubObjectIdentifier> bothoi = Arrays.asList( new SubObjectIdentifier(oi1, new ObjectPaths(Arrays.asList("/fo"))), new SubObjectIdentifier(oi2, new ObjectPaths(Arrays.asList("/ba")))); successGetObjects(user, two); ws.getObjectsSubSet(user, ois1l2); ws.getObjectsSubSet(user, bothoi); ws.getReferencedObjects(user, refchain2); ws.setResourceConfig(build.withMaxReturnedDataSize(39).build()); err = new IllegalArgumentException(String.format(errstr, 40, 39)); failGetObjects(user, two, err, true); failGetSubset(user, ois1l2, err); failGetSubset(user, bothoi, err); failGetReferencedObjects(user, refchain2, err); List<SubObjectIdentifier> all = new LinkedList<SubObjectIdentifier>(); all.addAll(ois1l2); all.addAll(bothoi); ws.setResourceConfig(build.withMaxReturnedDataSize(60).build()); ws.getObjectsSubSet(user, all); ws.setResourceConfig(build.withMaxReturnedDataSize(59).build()); err = new IllegalArgumentException(String.format(errstr, 60, 59)); failGetSubset(user, all, err); ws.setResourceConfig(oldcfg); } @Test public void useFileVsMemoryForData() throws Exception { WorkspaceUser user = new WorkspaceUser("sortfilemem"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("sortFileMem"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data1 = new LinkedHashMap<String, Object>(); data1.put("z", 1); data1.put("y", 2); Provenance p = new Provenance(user); List<WorkspaceSaveObject> objs = new ArrayList<WorkspaceSaveObject>(); objs.add(new WorkspaceSaveObject(data1, SAFE_TYPE1, null, p, false)); final int[] filesCreated = {0}; TempFileListener listener = new TempFileListener() { @Override public void createdTempFile(File f) { filesCreated[0]++; } }; ws.getTempFilesManager().addListener(listener); ws.getTempFilesManager().cleanup(); //these tests don't clean up after each test ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder(oldcfg); //single file stays in memory ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(13).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); assertThat("created no temp files on save", filesCreated[0], is(0)); ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(13).build()); ObjectIdentifier oi = new ObjectIdentifier(wsi, 
1); ws.getObjects(user, Arrays.asList(oi)); assertThat("created no temp files on get", filesCreated[0], is(0)); ws.getObjectsSubSet(user, Arrays.asList(new SubObjectIdentifier(oi, new ObjectPaths(Arrays.asList("z"))))).get(0).getDataAsTokens().destroy(); assertThat("created 1 temp file on get subdata", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); //files go to disk except for small subdata filesCreated[0] = 0; ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(12).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); assertThat("created temp files on save", filesCreated[0], is(2)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(12).build()); oi = new ObjectIdentifier(wsi, 2); ws.getObjects(user, Arrays.asList(oi)).get(0).getDataAsTokens().destroy(); assertThat("created 1 temp files on get", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.getObjectsSubSet(user, Arrays.asList(new SubObjectIdentifier(oi, new ObjectPaths(Arrays.asList("z"))))).get(0).getDataAsTokens().destroy(); assertThat("created 1 temp files on get subdata part object", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.getObjectsSubSet(user, Arrays.asList(new SubObjectIdentifier(oi, new ObjectPaths(Arrays.asList("z", "y"))))).get(0).getDataAsTokens().destroy(); assertThat("created 2 temp files on get subdata full object", filesCreated[0], is(2)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); // test with multiple objects Map<String, Object> data2 = new LinkedHashMap<String, Object>(); data2.put("w", 1); data2.put("f", 2); //already sorted so no temp files will be created Map<String, Object> data3 = new LinkedHashMap<String, Object>(); data3.put("x", 1); data3.put("z", 2); objs.add(new WorkspaceSaveObject(data2, SAFE_TYPE1, null, p, false)); objs.add(new WorkspaceSaveObject(data3, SAFE_TYPE1, null, p, false)); //multiple objects in memory filesCreated[0] = 0; ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(39).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); assertThat("created no temp files on save", filesCreated[0], is(0)); ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(39).build()); List<ObjectIdentifier> ois = Arrays.asList(new ObjectIdentifier(wsi, 3), new ObjectIdentifier(wsi, 4), new ObjectIdentifier(wsi, 5)); for (WorkspaceObjectData wod: ws.getObjects(user, ois)) { wod.getDataAsTokens().destroy(); } assertThat("created no temp files on get", filesCreated[0], is(0)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); //multiple objects to file ws.setResourceConfig(build.withMaxIncomingDataMemoryUsage(38).build()); filesCreated[0] = 0; ws.saveObjects(user, wsi, objs, getIdFactory()); //two files per data - 1 for relabeling, 1 for sort assertThat("created temp files on save", filesCreated[0], is(4)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(38).build()); for (WorkspaceObjectData wod: ws.getObjects(user, ois)) { wod.getDataAsTokens().destroy(); } assertThat("created 1 temp files on get", filesCreated[0], is(1)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); filesCreated[0] = 0; 
ws.setResourceConfig(build.withMaxReturnedDataMemoryUsage(25).build()); for (WorkspaceObjectData wod: ws.getObjects(user, ois)) { wod.getDataAsTokens().destroy(); } assertThat("created 2 temp files on get", filesCreated[0], is(2)); JSONRPCLayerTester.assertNoTempFilesExist(ws.getTempFilesManager()); ws.getTempFilesManager().removeListener(listener); ws.setResourceConfig(oldcfg); } @Test public void storedDataIsSorted() throws Exception { WorkspaceUser user = new WorkspaceUser("dataIsSorted"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("dataissorted"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Map<String, Object> data1 = new LinkedHashMap<String, Object>(); data1.put("z", 1); data1.put("y", 2); String expected = "{\"y\":2,\"z\":1}"; Provenance p = new Provenance(user); List<WorkspaceSaveObject> objs = new ArrayList<WorkspaceSaveObject>(); objs.add(new WorkspaceSaveObject(data1, SAFE_TYPE1, null, p, false)); ws.saveObjects(user, wsi, objs, getIdFactory()); WorkspaceObjectData o = ws.getObjects( user, Arrays.asList(new ObjectIdentifier(wsi, 1))).get(0); String data = IOUtils.toString(o.getDataAsTokens().getJSON()); assertThat("data is sorted", data, is(expected)); assertThat("data marked as sorted", o.getDataAsTokens().isSorted(), is(true)); } @Test public void exceedSortMemory() throws Exception { WorkspaceUser user = new WorkspaceUser("exceedSortMem"); WorkspaceIdentifier wsi = new WorkspaceIdentifier("exceedsortmem"); ws.createWorkspace(user, wsi.getIdentifierString(), false, null, null); Provenance p = new Provenance(user); List<WorkspaceSaveObject> objs = new ArrayList<WorkspaceSaveObject>(); String safejson = "{\"z\":\"a\"}"; String json = "{\"z\":\"a\",\"b\":\"d\"}"; objs.add(new WorkspaceSaveObject(new JsonTokenStream(safejson), SAFE_TYPE1, null, p, false)); objs.add(new WorkspaceSaveObject(new JsonTokenStream(json), SAFE_TYPE1, null, p, false)); ResourceUsageConfiguration oldcfg = ws.getResourceConfig(); ResourceUsageConfigurationBuilder build = new ResourceUsageConfigurationBuilder(oldcfg) .withMaxIncomingDataMemoryUsage(1); int maxmem = 8 + 64 + 8 + 64; ws.setResourceConfig(build.withMaxRelabelAndSortMemoryUsage(maxmem).build()); ws.saveObjects(user, wsi, objs, getIdFactory()); ws.setResourceConfig(build.withMaxRelabelAndSortMemoryUsage(maxmem - 1).build()); try { ws.saveObjects(user, wsi, objs, getIdFactory()); fail("sorted w/ too little mem"); } catch (TypedObjectValidationException tove) { assertThat("got correct exception", tove.getMessage(), is("Object #2: Memory necessary for sorting map keys exceeds the limit " + (maxmem - 1) + " bytes at /")); } ws.setResourceConfig(oldcfg); } }
Test for extracted metadata w/ large value
src/us/kbase/workspace/test/workspace/WorkspaceTest.java
Test for extracted metadata w/ large value
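The useFileVsMemoryForData test above counts temp-file creation through a listener, and the idiom is worth isolating. A minimal sketch, assuming the names that appear in the test (TempFileListener, createdTempFile, and the TempFilesManager addListener/removeListener pair) plus a workspace field ws from the enclosing test class; it belongs inside a test method and is illustrative, not a verified standalone API.

// A single-element array gives the anonymous listener a mutable counter while
// the local variable itself stays final, as required for capture by an inner class.
final int[] filesCreated = {0};
TempFileListener listener = new TempFileListener() {
    @Override
    public void createdTempFile(File f) {
        filesCreated[0]++;
    }
};
ws.getTempFilesManager().addListener(listener);
try {
    // exercise save/get paths here and assert on filesCreated[0]
} finally {
    // detach so counts do not leak into later tests that share ws
    ws.getTempFilesManager().removeListener(listener);
}

The try/finally removal mirrors the removeListener call at the end of the test; without it, listener counts would bleed across test methods that reuse the same workspace instance.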
Java
epl-1.0
1041155f083337deefcaff6c71be54ef747862e8
0
Beagle-PSE/Beagle,Beagle-PSE/Beagle,Beagle-PSE/Beagle
package de.uka.ipd.sdq.beagle.gui; import de.uka.ipd.sdq.beagle.core.UserConfiguration; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import java.util.Iterator; /** * A page of {@link BeagleAnalysisWizard} allowing the user to choose which of the * components they selected will be be analysed. Leaving out some components is possible, * adding new ones isn't. * * @author Christoph Michelbach */ public class SelectionOverviewWizardPage extends WizardPage { /** * The title of this page. */ private static final String TITLE = "Selection Overview"; /** * The description of this page. */ private static final String DESCRIPTION = "Make sure you want to analyse the elements listed below."; /** * The number of columns of the layout of container which contains the entire content * of this page. */ private static final int MAIN_LAYOUT_NR_COLUMS = 1; /** * The {@link UserConfiguration} this {@link SelectionOverviewWizardPage} uses. */ private final UserConfiguration userConfiguration; /** * The main container. */ private Composite mainContainer; /** * Constructs a new {@link SelectionOverviewWizardPage} being linked to the given * {@code userConfiguration}. * * @param userConfiguration The {@link UserConfiguration} this * {@link SelectionOverviewWizardPage} will be permanently linked to. * Changing the associated {@link UserConfiguration} is not possible. */ public SelectionOverviewWizardPage(final UserConfiguration userConfiguration) { super(TITLE); setTitle(TITLE); setDescription(DESCRIPTION); this.userConfiguration = userConfiguration; } @Override public void createControl(final Composite parent) { this.mainContainer = new Composite(parent, SWT.NONE); final GridLayout layout = new GridLayout(); this.mainContainer.setLayout(layout); layout.numColumns = MAIN_LAYOUT_NR_COLUMS; final Label labelHeader = new Label(this.mainContainer, SWT.NONE); labelHeader.setText("These elements are selected for analysis:"); final Iterator<String> iterator = this.userConfiguration.getComponents().iterator(); while (iterator.hasNext()) { final String component = iterator.next(); final Label labelItem = new Label(this.mainContainer, SWT.NONE); labelItem.setText(" • Component: " + component); } // required to avoid an error in the system setControl(this.mainContainer); setPageComplete(true); } }
GUI/src/main/java/de/uka/ipd/sdq/beagle/gui/SelectionOverviewWizardPage.java
package de.uka.ipd.sdq.beagle.gui; import de.uka.ipd.sdq.beagle.core.UserConfiguration; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import java.util.Iterator; /** * A page of {@link BeagleAnalysisWizard} allowing the user to choose which of the * components they selected will be be analysed. Leaving out some components is possible, * adding new ones isn't. * * @author Christoph Michelbach */ public class SelectionOverviewWizardPage extends WizardPage { /** * The title of this page. */ private static final String TITLE = "Selection Overview"; /** * The description of this page. */ private static final String DESCRIPTION = "Make sure you want to anaylse the components listed below."; /** * The number of columns of the layout of container which contains the entire content * of this page. */ private static final int MAIN_LAYOUT_NR_COLUMS = 1; /** * The {@link UserConfiguration} this {@link SelectionOverviewWizardPage} uses. */ private final UserConfiguration userConfiguration; /** * The main container. */ private Composite mainContainer; /** * Constructs a new {@link SelectionOverviewWizardPage} being linked to the given * {@code userConfiguration}. * * @param userConfiguration The {@link UserConfiguration} this * {@link SelectionOverviewWizardPage} will be permanently linked to. * Changing the associated {@link UserConfiguration} is not possible. */ public SelectionOverviewWizardPage(final UserConfiguration userConfiguration) { super(TITLE); setTitle(TITLE); setDescription(DESCRIPTION); this.userConfiguration = userConfiguration; } @Override public void createControl(final Composite parent) { this.mainContainer = new Composite(parent, SWT.NONE); final GridLayout layout = new GridLayout(); this.mainContainer.setLayout(layout); layout.numColumns = MAIN_LAYOUT_NR_COLUMS; final Label labelHeader = new Label(this.mainContainer, SWT.NONE); labelHeader.setText("These components are selected for anaylsis:"); final Iterator<String> iterator = this.userConfiguration.getComponents().iterator(); while (iterator.hasNext()) { final String component = iterator.next(); final Label labelItem = new Label(this.mainContainer, SWT.NONE); labelItem.setText(" • Component: " + component); } // required to avoid an error in the system setControl(this.mainContainer); setPageComplete(true); } }
Fixed sentences.
GUI/src/main/java/de/uka/ipd/sdq/beagle/gui/SelectionOverviewWizardPage.java
Fixed sentences.
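A page such as SelectionOverviewWizardPage only becomes visible once a JFace Wizard adds it. The BeagleAnalysisWizard named in the javadoc is not part of this record, so the host wizard below is a hypothetical stand-in; only SelectionOverviewWizardPage and UserConfiguration come from the code above, while Wizard, addPage, setWindowTitle, and performFinish are standard org.eclipse.jface.wizard API.

import org.eclipse.jface.wizard.Wizard;

// Hypothetical host wizard for illustration only.
public class ExampleAnalysisWizard extends Wizard {

    private final UserConfiguration userConfiguration;

    public ExampleAnalysisWizard(final UserConfiguration userConfiguration) {
        this.userConfiguration = userConfiguration;
        setWindowTitle("Beagle Analysis");
    }

    @Override
    public void addPages() {
        // JFace invokes createControl(Composite) lazily, the first time the
        // page is shown, so the selection labels are built on demand.
        addPage(new SelectionOverviewWizardPage(this.userConfiguration));
    }

    @Override
    public boolean performFinish() {
        // The real wizard would start the analysis here.
        return true;
    }
}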
Java
agpl-3.0
a0da26bbdfa5e90afe395e513969c13fbef6f9ab
0
elki-project/elki,elki-project/elki,elki-project/elki
package de.lmu.ifi.dbs.evaluation.procedure; import de.lmu.ifi.dbs.algorithm.Algorithm; import de.lmu.ifi.dbs.data.MetricalObject; import de.lmu.ifi.dbs.database.Database; import de.lmu.ifi.dbs.evaluation.Evaluation; import de.lmu.ifi.dbs.evaluation.holdout.Holdout; /** * An evaluation procedure evaluates a specified algorithm * based on a range of pairs of training and test sets. * However, test sets may remain empty for certain evaluation scenarios, * e.g. for clustering algorithms of some sort. * * @author Arthur Zimek (<a href="mailto:[email protected]">[email protected]</a>) */ public interface EvaluationProcedure<M extends MetricalObject,A extends Algorithm<M>> { public static final String ILLEGAL_STATE = "EvaluationProcedure has not been properly prepared to perform an evaluation."; public void setTime(boolean time); public void setVerbose(boolean verbose); /** * Sets the specified training and test set. * * * @param training the database to train an algorithm * @param test the database to test an algorithm */ public void set(Database<M> training, Database<M> test); /** * The given database can be splitted as specified * by a certain holdout procedure. The partitions are set * as training and test sets for the evaluation procedure. * * * @param data the database to prepare holdouts from * @param holdout the holdout procedure */ public void set(Database<M> data, Holdout<M> holdout); /** * Evaluates an algorithm. * * * @param algorithm the algorithm to evaluate * @return the evaluation of the specified algorithm * based on the previously specified training and test sets * @throws IllegalStateException if a holdout is required to set * before calling this method */ public Evaluation<M,A> evaluate(A algorithm) throws IllegalStateException; /** * Provides a description of the used holdout. * * * @return a description of the used holdout */ public String setting(); }
src/de/lmu/ifi/dbs/evaluation/procedure/EvaluationProcedure.java
package de.lmu.ifi.dbs.evaluation.procedure; import de.lmu.ifi.dbs.algorithm.Algorithm; import de.lmu.ifi.dbs.data.MetricalObject; import de.lmu.ifi.dbs.database.Database; import de.lmu.ifi.dbs.evaluation.Evaluation; import de.lmu.ifi.dbs.evaluation.holdout.Holdout; /** * An evaluation procedure evaluates a specified algorithm * based on a range of pairs of training and test sets. * However, test sets may remain empty for certain evaluation scenarios, * e.g. for clustering algorithms of some sort. * * @author Arthur Zimek (<a href="mailto:[email protected]">[email protected]</a>) */ public interface EvaluationProcedure<M extends MetricalObject,A extends Algorithm<M>> { public void setTime(boolean time); public void setVerbose(boolean verbose); /** * Sets the specified training and test set. * * * @param training the database to train an algorithm * @param test the database to test an algorithm */ public void set(Database<M> training, Database<M> test); /** * The given database can be splitted as specified * by a certain holdout procedure. The partitions are set * as training and test sets for the evaluation procedure. * * * @param data the database to prepare holdouts from * @param holdout the holdout procedure */ public void set(Database<M> data, Holdout<M> holdout); /** * Evaluates an algorithm. * * * @param algorithm the algorithm to evaluate * @return the evaluation of the specified algorithm * based on the previously specified training and test sets * @throws IllegalStateException if a holdout is required to set * before calling this method */ public Evaluation<M,A> evaluate(A algorithm) throws IllegalStateException; /** * Provides a description of the used holdout. * * * @return a description of the used holdout */ public String setting(); }
Exception message Illegal State
src/de/lmu/ifi/dbs/evaluation/procedure/EvaluationProcedure.java
Exception message Illegal State
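The commit adds the ILLEGAL_STATE message to the interface itself; since interface fields are implicitly public static final, implementers can throw it unqualified. Below is a hypothetical skeleton assuming nothing beyond the contract shown above; it is not part of the codebase.

import de.lmu.ifi.dbs.algorithm.Algorithm;
import de.lmu.ifi.dbs.data.MetricalObject;
import de.lmu.ifi.dbs.database.Database;
import de.lmu.ifi.dbs.evaluation.Evaluation;
import de.lmu.ifi.dbs.evaluation.holdout.Holdout;

// Illustrative skeleton of an implementation.
public class HoldoutEvaluationProcedure<M extends MetricalObject, A extends Algorithm<M>>
        implements EvaluationProcedure<M, A> {

    private Database<M> training;
    private Database<M> test;

    public void setTime(boolean time) { /* forwarded to the algorithm later */ }

    public void setVerbose(boolean verbose) { /* forwarded to the algorithm later */ }

    public void set(Database<M> training, Database<M> test) {
        this.training = training;
        this.test = test;
    }

    public void set(Database<M> data, Holdout<M> holdout) {
        // a real implementation would partition data via the holdout here
        this.training = data;
        this.test = null;
    }

    public Evaluation<M, A> evaluate(A algorithm) throws IllegalStateException {
        if (training == null) {
            // the inherited constant keeps the message uniform across implementations
            throw new IllegalStateException(ILLEGAL_STATE);
        }
        return null; // placeholder: run the algorithm and wrap its result
    }

    public String setting() {
        return "hypothetical holdout description";
    }
}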
Java
lgpl-2.1
a88326d9088ad96bc860e5973d14ac2bd500c027
0
Distrotech/oRTP,Distrotech/oRTP,carpikes/ortp,carpikes/ortp,videomedicine/oRTP,VTCSecureLLC/ortp,avis/ortp,VTCSecureLLC/ortp,carpikes/ortp,Linphone-sync/oRTP,avis/ortp,jiangjianping/ortp,videomedicine/oRTP,samueljero/linphone-oRTP,dmonakhov/ortp,dozeo/ortp,wugh7125/ortp,caizw/ortp,VTCSecureLLC/ortp,caizw/ortp,dmonakhov/ortp,dmonakhov/ortp,samueljero/linphone-oRTP,Linphone-sync/oRTP,caizw/ortp,Distrotech/oRTP,dozeo/ortp,avis/ortp,jiangjianping/ortp,jiangjianping/ortp,wugh7125/ortp,videomedicine/oRTP,wugh7125/ortp,dozeo/ortp,Linphone-sync/oRTP,samueljero/linphone-oRTP
/* p2pproxy Copyright (C) 2007 Jehan Monnier () P2pProxyMain.java - main class. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package org.linphone.p2pproxy.core; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.management.ManagementFactory; import java.net.URI; import java.net.URL; import java.util.InvalidPropertiesFormatException; import java.util.Properties; import javax.management.ObjectName; import net.jxta.exception.JxtaException; import org.apache.log4j.Logger; import org.apache.log4j.PropertyConfigurator; import org.linphone.p2pproxy.api.P2pProxyException; import org.linphone.p2pproxy.api.P2pProxyManagement; import org.linphone.p2pproxy.api.P2pProxyNotReadyException; import org.linphone.p2pproxy.api.P2pProxyResourceManagement; import org.linphone.p2pproxy.api.P2pProxyUserAlreadyExistException; import org.linphone.p2pproxy.core.media.MediaResourceService; import org.linphone.p2pproxy.core.sipproxy.SipProxyRegistrar; import org.zoolu.sip.provider.SipStack; import org.linphone.p2pproxy.launcher.P2pProxylauncherConstants; public class P2pProxyMain implements P2pProxyMainMBean { private static Logger mLog = null; private static JxtaNetworkManager mJxtaNetworkManager; private static ServiceProvider mServiceProvider; private static P2pProxyManagement mP2pProxyManagement; private static SipProxyRegistrar mSipAndPipeListener; private static P2pProxyAccountManagementMBean mP2pProxyAccountManagement; private static P2pProxyResourceManagement mP2pProxySipProxyRegistrarManagement; public final static String ACCOUNT_MGR_MBEAN_NAME="org.linphone.p2proxy:type=account-manager"; public final static String PROXY_REG_MBEAN_NAME="org.linphone.p2proxy:type=proxy-registrar"; public final static String MAIN_MBEAN_NAME="org.linphone.p2proxy:type=main"; private static P2pProxyMain mP2pProxyMain = new P2pProxyMain(); private static Configurator mConfigurator; private static String mConfigHomeDir; static private boolean mExit = false; static private boolean isReady = false; static { // System.setProperty("com.sun.management.jmxremote", "true"); // System.setProperty("com.sun.management.jmxremote.port", "6789"); // System.setProperty("com.sun.management.jmxremote.authenticate", "false"); // System.setProperty("com.sun.management.jmxremote.ssl", "false"); } /** * @param args * @throws P2pProxyException * @throws InterruptedException * @throws JxtaException * @throws IOException * @throws FileNotFoundException * @throws InvalidPropertiesFormatException */ public static void main(String[] args) { try { mConfigHomeDir=System.getProperty("user.home")+"/.p2pproxy"; int lsipPort=5040; int lMediaPort=MediaResourceService.AUDIO_VIDEO_LOCAL_PORT_DEFAULT_VALUE; int lP2pPort = 9701; JxtaNetworkManager.Mode lMode = JxtaNetworkManager.Mode.auto; // setup 
logging // get config dire first for (int i=0; i < args.length; i=i+2) { String argument = args[i]; if (argument.equals("-jxta")) { mConfigHomeDir = args[i + 1]; File lFile = new File(mConfigHomeDir); if (lFile.exists() == false) lFile.mkdir(); System.out.println("mConfigHomeDir detected[" + mConfigHomeDir + "]"); } } System.setProperty("org.linphone.p2pproxy.home", mConfigHomeDir); System.setProperty("net.jxta.logging.Logging", "FINEST"); System.setProperty("net.jxta.level", "FINEST"); mP2pProxyMain.loadTraceConfigFile(); mLog.info("p2pproxy initilizing..."); File lPropertyFile = new File(mConfigHomeDir+"/p2pproxy.properties.xml"); mConfigurator = new Configurator(lPropertyFile); try { ObjectName lObjectName = new ObjectName(MAIN_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mP2pProxyMain,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } // get other params for (int i=0; i < args.length; i=i+2) { String argument = args[i]; if (argument.equals("-jxta") || argument.equals("-home")) { mConfigHomeDir = args[i + 1]; //nop } else if (argument.equals("-sip")) { lsipPort = Integer.parseInt(args[i + 1]); System.out.println("sipPort detected[" + lsipPort + "]"); mConfigurator.setProperty(SipProxyRegistrar.REGISTRAR_PORT, Integer.toString(lsipPort)); } else if (argument.equals("-media")) { lMediaPort = Integer.parseInt(args[i + 1]); System.out.println("media detected[" + lMediaPort + "]"); mConfigurator.setProperty(MediaResourceService.AUDIO_VIDEO_LOCAL_PORT, Integer.toString(lMediaPort)); } else if (argument.equals("-p2p")) { lP2pPort = Integer.parseInt(args[i + 1]); System.out.println("p2p port detected[" + lP2pPort + "]"); mConfigurator.setProperty(JxtaNetworkManager.TCP_LISTENING_PORT, Integer.toString(lP2pPort)); } else if (argument.equals("-relay")) { lMode = JxtaNetworkManager.Mode.relay; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("relay mode detected"); i--; } else if (argument.equals("-edge-only")) { lMode = JxtaNetworkManager.Mode.edge; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("edge only mode detected"); i--; }else if (argument.equals("-seeding-server")) { lMode = JxtaNetworkManager.Mode.seeding_server; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("seeding-server detected"); i--; } else if (argument.equals("-auto-config")) { lMode = JxtaNetworkManager.Mode.auto; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("auto-mode mode detected"); i--; } else if (argument.equals("-seeding-rdv")) { mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RDV, args[i + 1]); System.out.println("seeding rdv detected[" + args[i + 1] + "]"); } else if (argument.equals("-seeding-relay")) { mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RELAY, args[i + 1]); System.out.println("seeding relay detected[" + args[i + 1] + "]"); } else if (argument.equals("-seeding")) { mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RDV, args[i + 1]); mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RELAY, args[i + 1]); System.out.println("seeding detected[" + args[i + 1] + "]"); } else if (argument.equals("-public-address")) { mConfigurator.setProperty(JxtaNetworkManager.HTTP_LISTENING_PUBLIC_ADDRESS,args[i + 1]+":9700"); mConfigurator.setProperty(JxtaNetworkManager.TCP_LISTENING_PUBLIC_ADDRESS,args[i + 1]+":"+lP2pPort); mConfigurator.setProperty(MediaResourceService.AUDIO_VIDEO_PUBLIC_URI,"udp://"+args[i + 
1]+":"+lMediaPort); mConfigurator.setProperty(SipProxyRegistrar.REGISTRAR_PUBLIC_ADDRESS,args[i + 1]); System.out.println("public address detected[" + args[i + 1] + "]"); } else { System.out.println("Invalid option: " + args[i]); usage(); System.exit(1); } } File lJxtaDirectory = new File (mConfigHomeDir); if (lJxtaDirectory.exists() == false) lJxtaDirectory.mkdir(); switch (lMode) { case edge: startEdge(mConfigurator,lJxtaDirectory); break; case relay: startRelay(mConfigurator,lJxtaDirectory); break; case seeding_server: startSeeding(mConfigurator,lJxtaDirectory); break; case auto: //1 start edge startEdge(mConfigurator,lJxtaDirectory); // check if peer mode required if (mP2pProxyManagement.shouldIBehaveAsAnRdv() == true) { String lPublicAddress = mP2pProxyManagement.getPublicIpAddress().getHostAddress(); mConfigurator.setProperty(JxtaNetworkManager.HTTP_LISTENING_PUBLIC_ADDRESS, lPublicAddress+":9700"); mConfigurator.setProperty(JxtaNetworkManager.TCP_LISTENING_PUBLIC_ADDRESS, lPublicAddress+":9701"); mServiceProvider.stop(); mJxtaNetworkManager.stop(); startRelay(mConfigurator,lJxtaDirectory); mJxtaNetworkManager.getPeerGroup().getRendezVousService().setAutoStart(true); } break; default: mLog.fatal("unsupported mode ["+lMode+"]"); System.exit(1); } //set management try { ObjectName lObjectName = new ObjectName(ACCOUNT_MGR_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mP2pProxyAccountManagement,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } mLog.warn("p2pproxy initilized"); isReady = true; while (mExit == false) { Thread.sleep(1000); } if (mServiceProvider!= null) mServiceProvider.stop(); if (mServiceProvider!= null) mServiceProvider.stop(); if (mSipAndPipeListener!= null) mSipAndPipeListener.stop(); if (mJxtaNetworkManager != null) mJxtaNetworkManager.stop(); mLog.info("p2pproxy stopped"); return; } catch (Exception e) { mLog.fatal("error",e); System.exit(1); } } private static void startEdge(Configurator aProperties,File aConfigDir) throws Exception{ // setup jxta mJxtaNetworkManager = new JxtaNetworkManager(aProperties,aConfigDir); mServiceProvider = new EdgePeerServiceManager(aProperties, mJxtaNetworkManager); mP2pProxyManagement = (P2pProxyManagement) mServiceProvider; mP2pProxySipProxyRegistrarManagement = (P2pProxyResourceManagement) mServiceProvider; //setup account manager mP2pProxyAccountManagement = new P2pProxyAccountManagement(mJxtaNetworkManager); mServiceProvider.start(3000L); } private static void startRelay(Configurator aProperties,File aConfigDir) throws Exception{ // setup jxta mJxtaNetworkManager = new JxtaNetworkManager(aProperties,aConfigDir); mServiceProvider = new SuperPeerServiceManager(aProperties, mJxtaNetworkManager); mP2pProxyManagement = (P2pProxyManagement) mServiceProvider; mP2pProxySipProxyRegistrarManagement = (P2pProxyResourceManagement) mServiceProvider; mServiceProvider.start(3000L); //setup account manager mP2pProxyAccountManagement = new P2pProxyAccountManagement(mJxtaNetworkManager); // setup sip provider SipStack.log_path = mConfigHomeDir+"/logs"; mSipAndPipeListener = new SipProxyRegistrar(mConfigurator,mJxtaNetworkManager,mP2pProxyAccountManagement); //set management try { ObjectName lObjectName = new ObjectName(PROXY_REG_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mSipAndPipeListener,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } } private static void startSeeding(Configurator aProperties,File aConfigDir) throws Exception{ // setup 
jxta mJxtaNetworkManager = new JxtaNetworkManager(aProperties,aConfigDir); mServiceProvider = new SeedingPeerServiceManager(aProperties, mJxtaNetworkManager,true); mP2pProxyManagement = null; mP2pProxySipProxyRegistrarManagement = (P2pProxyResourceManagement) mServiceProvider; mServiceProvider.start(3000L); //setup account manager mP2pProxyAccountManagement = new P2pProxyAccountManagement(mJxtaNetworkManager); // setup sip provider SipStack.log_path = mConfigHomeDir+"/logs"; mSipAndPipeListener = new SipProxyRegistrar(mConfigurator,mJxtaNetworkManager,mP2pProxyAccountManagement); //set management try { ObjectName lObjectName = new ObjectName(PROXY_REG_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mSipAndPipeListener,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } } private static void usage() { System.out.println("p2pproxy"); System.out.println("-home : directory where configuration/cache is located (including jxta cache.default is $HOME/.p2pproxy"); System.out.println("-sip : udp proxy port, default 5060"); System.out.println("-media : udp relay/stun port, default 16000"); System.out.println("-p2p : p2p tcp port, default 9701"); System.out.println("-relay : super peer mode"); System.out.println("-edge-only : edge mode"); System.out.println("-seeding-server : seeding server mode"); System.out.println("-auto-config : automatically choose edge or relay (default mode)"); System.out.println("-seeding : list of boostrap rdv separated by | (ex tcp://127.0.0.1:9701|http://127.0.0.2:9700)"); System.out.println("-public-address : ip as exported to peers (ex myPublicAddress.no-ip.org)"); } public void loadTraceConfigFile() throws P2pProxyException { staticLoadTraceConfigFile(); } public static void staticLoadTraceConfigFile() throws P2pProxyException { try { InputStream lLog4jStream = null; String lSearchDir; //search build dir lSearchDir = System.getProperty("org.linphone.p2pproxy.build.dir"); File lFile = new File(lSearchDir+"/log4j.properties"); if (lFile.exists() == false) { lSearchDir = mConfigHomeDir; lFile = new File(lSearchDir+"/log4j.properties"); if (lFile.exists() == false) { lSearchDir="."; lFile = new File(lSearchDir+"/log4j.properties"); if (lFile.exists() == false) { lLog4jStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("log4j.properties"); } } } if (lLog4jStream == null) { lLog4jStream = new FileInputStream(lFile); } Properties llog4Properties = new Properties(); llog4Properties.load(lLog4jStream); PropertyConfigurator.configure(llog4Properties); mLog = Logger.getLogger(P2pProxyMain.class); // read java.util.logging properties llog4Properties.setProperty("java.util.logging.FileHandler.pattern",System.getProperty("org.linphone.p2pproxy.home")+"/logs/p2pproxy.log"); File lLogConfigFile = new File(mConfigHomeDir.concat("log4j.properties")+".tmp"); if (lLogConfigFile.exists() == false) { lLogConfigFile.createNewFile(); } llog4Properties.store(new FileOutputStream(lLogConfigFile), "tmp"); System.setProperty("java.util.logging.config.file",lLogConfigFile.getAbsolutePath()); java.util.logging.LogManager.getLogManager().readConfiguration(); } catch (Exception e) { throw new P2pProxyException("enable to load traces",e); } } private static void isReady() throws P2pProxyNotReadyException { try { if ((isReady == true && mJxtaNetworkManager.isConnectedToRendezVous(0) == true) || (isReady == true && mJxtaNetworkManager.getPeerGroup().getRendezVousService().isRendezVous())) { //nop connected } else { if 
(mJxtaNetworkManager != null ) { throw new P2pProxyNotReadyException("not connected to any rdv: status ["+mJxtaNetworkManager.getPeerGroup().getRendezVousService().getRendezVousStatus()+"]"); } else { throw new P2pProxyNotReadyException("initializing"); } } } catch (InterruptedException e) { throw new P2pProxyNotReadyException(e); } } /* p2pproxy.h implementation*/ public static int createAccount(String aUserName) { try { isReady(); mP2pProxyAccountManagement.createAccount(aUserName); } catch (P2pProxyUserAlreadyExistException e) { return P2pProxylauncherConstants.P2PPROXY_ACCOUNTMGT_USER_EXIST; } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_ERROR; } return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } public static int deleteAccount(String aUserName) { try { isReady(); mP2pProxyAccountManagement.deleteAccount(aUserName); } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_ERROR; } return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } public static int isValidAccount(String aUserName){ try { isReady(); if (mP2pProxyAccountManagement.isValidAccount(aUserName)) { return P2pProxylauncherConstants.P2PPROXY_ACCOUNTMGT_USER_EXIST; } else { return P2pProxylauncherConstants.P2PPROXY_ACCOUNTMGT_USER_NOT_EXIST; } } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_ERROR; } } public static String lookupSipProxyUri(String aDomaine) { try { isReady(); String[] lProxies = mP2pProxySipProxyRegistrarManagement.lookupSipProxiesUri(aDomaine); if (lProxies.length != 0) { return lProxies[0]; } else { return null; } } catch (Exception e) { return null; } } public static String[] lookupSipProxiesUri(String aDomaine) { try { isReady(); return mP2pProxySipProxyRegistrarManagement.lookupSipProxiesUri(aDomaine); } catch (Exception e) { return null; } } public static String[] lookupMediaServerAddress(String aDomaine) { try { isReady(); return mP2pProxySipProxyRegistrarManagement.getMediaServerList(); } catch (Exception e) { mLog.error("cannot find media resource",e); return null; } } public static int getState() { try { isReady(); return P2pProxylauncherConstants.P2PPROXY_CONNECTED; } catch (P2pProxyException e) { if (mLog != null) mLog.error("cannot get state",e); return P2pProxylauncherConstants.P2PPROXY_NOT_CONNECTED; } } public static int revokeSipProxy(String aProxy) { try { isReady(); mP2pProxySipProxyRegistrarManagement.revokeSipProxy(aProxy); return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_NOT_CONNECTED; } } public static int revokeMediaServer(String aServer) { try { isReady(); mP2pProxySipProxyRegistrarManagement.revokeMediaServer(aServer); return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_NOT_CONNECTED; } } public static void stop() { mExit = true; } }
p2pproxy/src/org/linphone/p2pproxy/core/P2pProxyMain.java
/* p2pproxy Copyright (C) 2007 Jehan Monnier () P2pProxyMain.java - main class. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package org.linphone.p2pproxy.core; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.management.ManagementFactory; import java.net.URI; import java.net.URL; import java.util.InvalidPropertiesFormatException; import java.util.Properties; import javax.management.ObjectName; import net.jxta.exception.JxtaException; import org.apache.log4j.Logger; import org.apache.log4j.PropertyConfigurator; import org.linphone.p2pproxy.api.P2pProxyException; import org.linphone.p2pproxy.api.P2pProxyManagement; import org.linphone.p2pproxy.api.P2pProxyNotReadyException; import org.linphone.p2pproxy.api.P2pProxyResourceManagement; import org.linphone.p2pproxy.api.P2pProxyUserAlreadyExistException; import org.linphone.p2pproxy.core.media.MediaResourceService; import org.linphone.p2pproxy.core.sipproxy.SipProxyRegistrar; import org.zoolu.sip.provider.SipStack; import org.linphone.p2pproxy.launcher.P2pProxylauncherConstants; public class P2pProxyMain implements P2pProxyMainMBean { private static Logger mLog = null; private static JxtaNetworkManager mJxtaNetworkManager; private static ServiceProvider mServiceProvider; private static P2pProxyManagement mP2pProxyManagement; private static SipProxyRegistrar mSipAndPipeListener; private static P2pProxyAccountManagementMBean mP2pProxyAccountManagement; private static P2pProxyResourceManagement mP2pProxySipProxyRegistrarManagement; public final static String ACCOUNT_MGR_MBEAN_NAME="org.linphone.p2proxy:type=account-manager"; public final static String PROXY_REG_MBEAN_NAME="org.linphone.p2proxy:type=proxy-registrar"; public final static String MAIN_MBEAN_NAME="org.linphone.p2proxy:type=main"; private static P2pProxyMain mP2pProxyMain = new P2pProxyMain(); private static Configurator mConfigurator; private static String mConfigHomeDir; static private boolean mExit = false; static private boolean isReady = false; static { // System.setProperty("com.sun.management.jmxremote", "true"); // System.setProperty("com.sun.management.jmxremote.port", "6789"); // System.setProperty("com.sun.management.jmxremote.authenticate", "false"); // System.setProperty("com.sun.management.jmxremote.ssl", "false"); } /** * @param args * @throws P2pProxyException * @throws InterruptedException * @throws JxtaException * @throws IOException * @throws FileNotFoundException * @throws InvalidPropertiesFormatException */ public static void main(String[] args) { try { mConfigHomeDir=System.getProperty("user.home")+"/.p2pproxy"; int lsipPort=5040; int lMediaPort=MediaResourceService.AUDIO_VIDEO_LOCAL_PORT_DEFAULT_VALUE; int lP2pPort = 9701; JxtaNetworkManager.Mode lMode = JxtaNetworkManager.Mode.auto; // setup 
logging // get config dire first for (int i=0; i < args.length; i=i+2) { String argument = args[i]; if (argument.equals("-jxta")) { mConfigHomeDir = args[i + 1]; File lFile = new File(mConfigHomeDir); if (lFile.exists() == false) lFile.mkdir(); System.out.println("mConfigHomeDir detected[" + mConfigHomeDir + "]"); } } System.setProperty("org.linphone.p2pproxy.home", mConfigHomeDir); System.setProperty("net.jxta.logging.Logging", "FINEST"); System.setProperty("net.jxta.level", "FINEST"); mP2pProxyMain.loadTraceConfigFile(); mLog.info("p2pproxy initilizing..."); File lPropertyFile = new File(mConfigHomeDir+"/p2pproxy.properties.xml"); mConfigurator = new Configurator(lPropertyFile); try { ObjectName lObjectName = new ObjectName(MAIN_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mP2pProxyMain,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } // get other params for (int i=0; i < args.length; i=i+2) { String argument = args[i]; if (argument.equals("-jxta") || argument.equals("-home")) { mConfigHomeDir = args[i + 1]; //nop } else if (argument.equals("-sip")) { lsipPort = Integer.parseInt(args[i + 1]); System.out.println("sipPort detected[" + lsipPort + "]"); mConfigurator.setProperty(SipProxyRegistrar.REGISTRAR_PORT, Integer.toString(lsipPort)); } else if (argument.equals("-media")) { lMediaPort = Integer.parseInt(args[i + 1]); System.out.println("media detected[" + lMediaPort + "]"); mConfigurator.setProperty(MediaResourceService.AUDIO_VIDEO_LOCAL_PORT, Integer.toString(lMediaPort)); } else if (argument.equals("-p2p")) { lP2pPort = Integer.parseInt(args[i + 1]); System.out.println("p2p port detected[" + lP2pPort + "]"); mConfigurator.setProperty(JxtaNetworkManager.TCP_LISTENING_PORT, Integer.toString(lP2pPort)); } else if (argument.equals("-relay")) { lMode = JxtaNetworkManager.Mode.relay; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("relay mode detected"); i--; } else if (argument.equals("-edge-only")) { lMode = JxtaNetworkManager.Mode.edge; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("edge only mode detected"); i--; }else if (argument.equals("-seeding-server")) { lMode = JxtaNetworkManager.Mode.seeding_server; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("seeding-server detected"); i--; } else if (argument.equals("-auto-config")) { lMode = JxtaNetworkManager.Mode.auto; mConfigurator.setProperty(JxtaNetworkManager.MODE, lMode.name()); System.out.println("auto-mode mode detected"); i--; } else if (argument.equals("-seeding-rdv")) { mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RDV, args[i + 1]); System.out.println("seeding rdv detected[" + args[i + 1] + "]"); } else if (argument.equals("-seeding-relay")) { mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RELAY, args[i + 1]); System.out.println("seeding relay detected[" + args[i + 1] + "]"); } else if (argument.equals("-seeding")) { mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RDV, args[i + 1]); mConfigurator.setProperty(JxtaNetworkManager.SEEDING_RELAY, args[i + 1]); System.out.println("seeding detected[" + args[i + 1] + "]"); } else if (argument.equals("-public-address")) { mConfigurator.setProperty(JxtaNetworkManager.HTTP_LISTENING_PUBLIC_ADDRESS,args[i + 1]+":9700"); mConfigurator.setProperty(JxtaNetworkManager.TCP_LISTENING_PUBLIC_ADDRESS,args[i + 1]+":"+lP2pPort); mConfigurator.setProperty(MediaResourceService.AUDIO_VIDEO_PUBLIC_URI,"udp://"+args[i + 
1]+":"+lMediaPort); mConfigurator.setProperty(SipProxyRegistrar.REGISTRAR_PUBLIC_ADDRESS,args[i + 1]); System.out.println("public address detected[" + args[i + 1] + "]"); } else { System.out.println("Invalid option: " + args[i]); usage(); System.exit(1); } } File lJxtaDirectory = new File (mConfigHomeDir); if (lJxtaDirectory.exists() == false) lJxtaDirectory.mkdir(); switch (lMode) { case edge: startEdge(mConfigurator,lJxtaDirectory); break; case relay: startRelay(mConfigurator,lJxtaDirectory); break; case seeding_server: startSeeding(mConfigurator,lJxtaDirectory); break; case auto: //1 start edge startEdge(mConfigurator,lJxtaDirectory); // check if peer mode required if (mP2pProxyManagement.shouldIBehaveAsAnRdv() == true) { String lPublicAddress = mP2pProxyManagement.getPublicIpAddress().getHostAddress(); mConfigurator.setProperty(JxtaNetworkManager.HTTP_LISTENING_PUBLIC_ADDRESS, lPublicAddress+":9700"); mConfigurator.setProperty(JxtaNetworkManager.TCP_LISTENING_PUBLIC_ADDRESS, lPublicAddress+":9701"); mServiceProvider.stop(); mJxtaNetworkManager.stop(); startRelay(mConfigurator,lJxtaDirectory); mJxtaNetworkManager.getPeerGroup().getRendezVousService().setAutoStart(true); } break; default: mLog.fatal("unsupported mode ["+lMode+"]"); System.exit(1); } //set management try { ObjectName lObjectName = new ObjectName(ACCOUNT_MGR_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mP2pProxyAccountManagement,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } mLog.warn("p2pproxy initilized"); isReady = true; while (mExit == false) { Thread.sleep(1000); } if (mServiceProvider!= null) mServiceProvider.stop(); if (mServiceProvider!= null) mServiceProvider.stop(); if (mSipAndPipeListener!= null) mSipAndPipeListener.stop(); if (mJxtaNetworkManager != null) mJxtaNetworkManager.stop(); mLog.info("p2pproxy stopped"); return; } catch (Exception e) { mLog.fatal("error",e); System.exit(1); } } private static void startEdge(Configurator aProperties,File aConfigDir) throws Exception{ // setup jxta mJxtaNetworkManager = new JxtaNetworkManager(aProperties,aConfigDir); mServiceProvider = new EdgePeerServiceManager(aProperties, mJxtaNetworkManager); mP2pProxyManagement = (P2pProxyManagement) mServiceProvider; mP2pProxySipProxyRegistrarManagement = (P2pProxyResourceManagement) mServiceProvider; //setup account manager mP2pProxyAccountManagement = new P2pProxyAccountManagement(mJxtaNetworkManager); mServiceProvider.start(3000L); } private static void startRelay(Configurator aProperties,File aConfigDir) throws Exception{ // setup jxta mJxtaNetworkManager = new JxtaNetworkManager(aProperties,aConfigDir); mServiceProvider = new SuperPeerServiceManager(aProperties, mJxtaNetworkManager); mP2pProxyManagement = (P2pProxyManagement) mServiceProvider; mP2pProxySipProxyRegistrarManagement = (P2pProxyResourceManagement) mServiceProvider; mServiceProvider.start(3000L); //setup account manager mP2pProxyAccountManagement = new P2pProxyAccountManagement(mJxtaNetworkManager); // setup sip provider SipStack.log_path = mConfigHomeDir+"/logs"; mSipAndPipeListener = new SipProxyRegistrar(mConfigurator,mJxtaNetworkManager,mP2pProxyAccountManagement); //set management try { ObjectName lObjectName = new ObjectName(PROXY_REG_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mSipAndPipeListener,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } } private static void startSeeding(Configurator aProperties,File aConfigDir) throws Exception{ // setup 
jxta mJxtaNetworkManager = new JxtaNetworkManager(aProperties,aConfigDir); mServiceProvider = new SeedingPeerServiceManager(aProperties, mJxtaNetworkManager,true); mP2pProxyManagement = null; mP2pProxySipProxyRegistrarManagement = (P2pProxyResourceManagement) mServiceProvider; mServiceProvider.start(3000L); //setup account manager mP2pProxyAccountManagement = new P2pProxyAccountManagement(mJxtaNetworkManager); // setup sip provider SipStack.log_path = mConfigHomeDir+"/logs"; mSipAndPipeListener = new SipProxyRegistrar(mConfigurator,mJxtaNetworkManager,mP2pProxyAccountManagement); //set management try { ObjectName lObjectName = new ObjectName(PROXY_REG_MBEAN_NAME); ManagementFactory.getPlatformMBeanServer().registerMBean(mSipAndPipeListener,lObjectName); } catch (Exception e) { mLog.warn("cannot register MBean",e); } } private static void usage() { System.out.println("p2pproxy"); System.out.println("-home : directory where configuration/cache is located (including jxta cache.default is $HOME/.p2pproxy"); System.out.println("-sip : udp proxy port, default 5060"); System.out.println("-media : udp relay/stun port, default 16000"); System.out.println("-p2p : p2p tcp port, default 9701"); System.out.println("-relay : super peer mode"); System.out.println("-edge-only : edge mode"); System.out.println("-seeding-server : seeding server mode"); System.out.println("-auto-config : automatically choose edge or relay (default mode)"); System.out.println("-seeding : list of boostrap rdv separated by | (ex tcp://127.0.0.1:9701|http://127.0.0.2:9700)"); System.out.println("-public-address : ip as exported to peers (ex myPublicAddress.no-ip.org)"); } public void loadTraceConfigFile() throws P2pProxyException { staticLoadTraceConfigFile(); } public static void staticLoadTraceConfigFile() throws P2pProxyException { try { InputStream lLog4jStream = null; String lSearchDir; //search build dir lSearchDir = System.getProperty("org.linphone.p2pproxy.build.dir"); File lFile = new File(lSearchDir+"/log4j.properties"); if (lFile.exists() == false) { lSearchDir = mConfigHomeDir; lFile = new File(lSearchDir+"/log4j.properties"); if (lFile.exists() == false) { lSearchDir="."; lFile = new File(lSearchDir+"/log4j.properties"); if (lFile.exists() == false) { lLog4jStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("log4j.properties"); } } } if (lLog4jStream == null) { lLog4jStream = new FileInputStream(lFile); } Properties llog4Properties = new Properties(); llog4Properties.load(lLog4jStream); PropertyConfigurator.configure(llog4Properties); mLog = Logger.getLogger(P2pProxyMain.class); // read java.util.logging properties llog4Properties.setProperty("java.util.logging.FileHandler.pattern",System.getProperty("org.linphone.p2pproxy.home")+"/logs/p2pproxy.log"); File lLogConfigFile = new File(mConfigHomeDir.concat("log4j.properties")+".tmp"); if (lLogConfigFile.exists() == false) { lLogConfigFile.createNewFile(); } llog4Properties.store(new FileOutputStream(lLogConfigFile), "tmp"); System.setProperty("java.util.logging.config.file",lLogConfigFile.getAbsolutePath()); java.util.logging.LogManager.getLogManager().readConfiguration(); } catch (Exception e) { throw new P2pProxyException("enable to load traces",e); } } private static void isReady() throws P2pProxyNotReadyException { try { if ((isReady == true && mJxtaNetworkManager.isConnectedToRendezVous(0) == true) || (isReady == true && mJxtaNetworkManager.getPeerGroup().getRendezVousService().isRendezVous())) { //nop connected } else { if 
(mJxtaNetworkManager != null ) { throw new P2pProxyNotReadyException("not connected to any rdv: status ["+mJxtaNetworkManager.getPeerGroup().getRendezVousService().getRendezVousStatus()+"]"); } else { throw new P2pProxyNotReadyException("initializing"); } } } catch (InterruptedException e) { throw new P2pProxyNotReadyException(e); } } /* p2pproxy.h implementation*/ public static int createAccount(String aUserName) { try { isReady(); mP2pProxyAccountManagement.createAccount(aUserName); } catch (P2pProxyUserAlreadyExistException e) { return P2pProxylauncherConstants.P2PPROXY_ACCOUNTMGT_USER_EXIST; } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_ERROR; } return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } public static int deleteAccount(String aUserName) { try { isReady(); mP2pProxyAccountManagement.deleteAccount(aUserName); } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_ERROR; } return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } public static int isValidAccount(String aUserName){ try { isReady(); if (mP2pProxyAccountManagement.isValidAccount(aUserName)) { return P2pProxylauncherConstants.P2PPROXY_ACCOUNTMGT_USER_EXIST; } else { return P2pProxylauncherConstants.P2PPROXY_ACCOUNTMGT_USER_NOT_EXIST; } } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_ERROR; } } public static String lookupSipProxyUri(String aDomaine) { try { isReady(); String[] lProxies = mP2pProxySipProxyRegistrarManagement.lookupSipProxiesUri(aDomaine); if (lProxies.length != 0) { return lProxies[0]; } else { return null; } } catch (Exception e) { return null; } } public static String[] lookupSipProxiesUri(String aDomaine) { try { isReady(); return mP2pProxySipProxyRegistrarManagement.lookupSipProxiesUri(aDomaine); } catch (Exception e) { return null; } } public static String[] lookupMediaServerAddress(String aDomaine) { try { isReady(); return mP2pProxySipProxyRegistrarManagement.getMediaServerList(); } catch (Exception e) { mLog.error("cannot find media resource",e); return null; } } public static int getState() { try { isReady(); return P2pProxylauncherConstants.P2PPROXY_CONNECTED; } catch (P2pProxyException e) { mLog.error("cannot get state",e); return P2pProxylauncherConstants.P2PPROXY_NOT_CONNECTED; } } public static int revokeSipProxy(String aProxy) { try { isReady(); mP2pProxySipProxyRegistrarManagement.revokeSipProxy(aProxy); return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_NOT_CONNECTED; } } public static int revokeMediaServer(String aServer) { try { isReady(); mP2pProxySipProxyRegistrarManagement.revokeMediaServer(aServer); return P2pProxylauncherConstants.P2PPROXY_NO_ERROR; } catch (P2pProxyException e) { return P2pProxylauncherConstants.P2PPROXY_NOT_CONNECTED; } } public static void stop() { mExit = true; } }
protect getstate against early call to tracing subsystem git-svn-id: 8f60b0cb95795e7f2da4f2192af0e1f6ffac8291@372 3f6dc0c8-ddfe-455d-9043-3cd528dc4637
p2pproxy/src/org/linphone/p2pproxy/core/P2pProxyMain.java
protect getstate against early call to tracing subsystem
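The diff here is a single null guard: getState() can run before staticLoadTraceConfigFile() has assigned mLog, so logging must tolerate an uninitialized logger. One hedged way to avoid repeating the guard at each early call site is a small helper like the following (hypothetical; the actual commit inlines the check in getState()):

// Hypothetical helper: centralizes the mLog null check so callers that may
// execute before log4j is configured cannot trip over a null logger.
private static void logError(String message, Throwable cause) {
    if (mLog != null) {
        mLog.error(message, cause);
    }
    // before configuration the error is silently dropped, matching the
    // behaviour of the inline guard added by this commit
}

getState() would then call logError("cannot get state", e) instead of guarding inline.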
Java
lgpl-2.1
1a27c00243e8923e6b7cf6467b2edea9f3818674
0
levants/lightmare
package org.lightmare.deploy.management; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.io.Writer; import java.util.List; import javax.servlet.Servlet; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.lightmare.deploy.fs.Watcher; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; /** * {@link Servlet} to manage deployed applications * * @author levan * */ @WebServlet("/DeployManager") public class DeployManager extends HttpServlet { private static final long serialVersionUID = 1L; public static final String DEPLOY_MANAGER_DEFAULT_NAME = "/DeployManager"; // html tags private static final String BEGIN_TAGS = "<tr><td><a name = \""; private static final String NAME_OF_TAGS = "\" href=\"#\"\">"; private static final String END_NAME_TAGS = "</a></td>\n"; private static final String END_TAGS = "</td><td><a href = \"DeployManager\">reload</a></td></tr>"; private static final String REDEPLOY_START_TAG = "<td><a name = \""; private static final String REDEPLOY_TYPE_TAG = "\" href=\"#\" onClick=\"sendRequest(this.name, '"; private static final String REDEPLOY_FILE_TYPE_TAG = "', '"; private static final String REDEPLOY_NAME_TAG = "')\">"; private static final String REDEPLOY_END_TAG = "</a></td>"; private static final String BEGIN_PAGE = StringUtils .concat("<html>\n", "\t<head><script type=\"text/javascript\">\n", "/* <![CDATA[ */\n", "\t\tfunction sendRequest(redeploy, type, fileType){\n ", "\t\t\tvar xmlhttp = new XMLHttpRequest();\n ", "\t\t\tvar reqUrl = \"DeployManager?file=\" + redeploy + \"&type=\" + type + \"&fileType=\" + fileType;\n", "\t\t\txmlhttp.open(\"GET\", reqUrl, true);\n", "\t\t\txmlhttp.send();\n", "}\n", "/* ]]> */\n", "</script>\n", "\t<title>Deployment management</title>", "</head>\n", "\t<body>\n", "\t<table>\n"); private static final String TYPE_TAG = "\t\t<tr><td><br><b>"; private static final String END_TYPE_TAG = "</b></br></td></tr>\n"; private static final String END_PAGE = "</body></table>\n </html>"; private static final String LOGIN_PAGE = StringUtils .concat("<html>\n", "\t\t<head>\n", "\t\t\t<title>Login</title>\n", "\t\t</head>\n", "\t\t<body>\n", "\t\t\t\t\t\t<br><form name = \"ManagementLogin\" method=\"post\">", "\t\t\t\t\t\t\t<br><input type=\"user\" name=\"user\"></br>", "\t\t\t\t\t\t\t<br><input type=\"password\" name=\"password\"></br>", "\t\t\t\t\t\t\t<br><input type=\"submit\" value=\"Submit\"></br>", "\t\t\t\t\t\t</form></br>\n"); private static final String INCORRECT_MESSAGE = "<br><b>invalid user name / passowd</b></br>"; private static final String END_LOGIN_PAGE = "</html>"; private static final String DEPLOYMENTS = "deployments"; private static final String DATA_SOURCES = "datasources"; // HTTP parameters private static final String REDEPLOY_PARAM_NAME = "file"; private static final String TYPE_PARAM_NAME = "type"; private static final String REDEPLOY_TYPE = "redeploy"; private static final String UNDEPLOY_TYPE = "undeploy"; protected static final String FILE_TYPE_PARAMETER_NAME = "fileType"; private static final String APP_DEPLOYMENT_TYPE = "application"; private static final String DTS_DEPLOYMENT_TYPE = "datasource"; private static final String USER_PARAMETER_NAME = "user"; private static final String PASS_PARAMETER_NAME = "password"; private static final String 
DEPLOY_PASS_KEY = "deploy_manager_pass"; // Security for deploy management private Security security; /** * Class to cache authenticated users for {@link DeployManager} java * {@link javax.servlet.http.HttpServlet} page * * @author levan * */ private static class DeployPass implements Serializable { private static final long serialVersionUID = 1L; private String userName; } private String getApplications() { List<File> apps = Watcher.listDeployments(); List<File> dss = Watcher.listDataSources(); StringBuilder builder = new StringBuilder(); builder.append(BEGIN_PAGE); builder.append(TYPE_TAG); builder.append(DEPLOYMENTS); builder.append(END_TYPE_TAG); String tag; if (ObjectUtils.available(apps)) { for (File app : apps) { tag = getTag(app.getPath(), APP_DEPLOYMENT_TYPE); builder.append(tag); } } builder.append(BEGIN_PAGE); builder.append(TYPE_TAG); builder.append(DATA_SOURCES); builder.append(END_TYPE_TAG); if (ObjectUtils.available(dss)) { for (File ds : dss) { tag = getTag(ds.getPath(), DTS_DEPLOYMENT_TYPE); builder.append(tag); } } builder.append(END_PAGE); return builder.toString(); } private void fillDeployType(StringBuilder builder, String app, String type, String fileType) { builder.append(REDEPLOY_START_TAG); builder.append(app); builder.append(REDEPLOY_TYPE_TAG); builder.append(type); builder.append(REDEPLOY_FILE_TYPE_TAG); builder.append(fileType); builder.append(REDEPLOY_NAME_TAG); builder.append(type); builder.append(REDEPLOY_END_TAG); } private String getTag(String app, String fileType) { StringBuilder builder = new StringBuilder(); builder.append(BEGIN_TAGS); builder.append(app); builder.append(NAME_OF_TAGS); builder.append(app); builder.append(END_NAME_TAGS); fillDeployType(builder, app, UNDEPLOY_TYPE, fileType); fillDeployType(builder, app, REDEPLOY_TYPE, fileType); builder.append(END_TAGS); return builder.toString(); } private String toLoginPage(boolean incorrect) { StringBuilder builder = new StringBuilder(); builder.append(LOGIN_PAGE); if (incorrect) { builder.append(INCORRECT_MESSAGE); } builder.append(END_LOGIN_PAGE); return builder.toString(); } private boolean authenticate(String userName, String password, HttpSession session) { boolean valid = security.authenticate(userName, password); if (valid) { DeployPass pass = new DeployPass(); pass.userName = userName; session.setAttribute(DEPLOY_PASS_KEY, pass); } return valid; } private boolean check(HttpSession session) { boolean valid = ObjectUtils.notNull(session); if (valid) { Object pass = session.getAttribute(DEPLOY_PASS_KEY); valid = ObjectUtils.notNull(pass); if (valid) { valid = (pass instanceof DeployPass) && (ObjectUtils.available(((DeployPass) pass).userName)); } else { valid = security.check(); } } return valid; } @Override public void init() throws ServletException { try { security = new Security(); } catch (IOException ex) { } super.init(); } @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { boolean check = check(request.getSession(Boolean.FALSE)); String html; if (check) { String fileName = request.getParameter(REDEPLOY_PARAM_NAME); String type = request.getParameter(TYPE_PARAM_NAME); if (ObjectUtils.available(fileName)) { if (type == null || REDEPLOY_TYPE.equals(type)) { Watcher.redeployFile(fileName); } else if (UNDEPLOY_TYPE.equals(type)) { Watcher.undeployFile(fileName); } } html = getApplications(); } else { html = toLoginPage(Boolean.FALSE); } Writer writer = response.getWriter(); try { writer.write(html); } finally { 
writer.close(); } } @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String userName = request.getParameter(USER_PARAMETER_NAME); String password = request.getParameter(PASS_PARAMETER_NAME); boolean valid = ObjectUtils.available(userName) && ObjectUtils.available(password); if (valid) { valid = authenticate(userName, password, request.getSession(Boolean.TRUE)); } if (valid) { response.sendRedirect("DeployManager"); } else { String html = toLoginPage(Boolean.TRUE); Writer writer = response.getWriter(); try { writer.write(html); } finally { writer.close(); } } } }
src/main/java/org/lightmare/deploy/management/DeployManager.java
package org.lightmare.deploy.management; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.io.Writer; import java.util.List; import javax.servlet.Servlet; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.lightmare.deploy.fs.Watcher; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; /** * {@link Servlet} to manage deployed applications * * @author levan * */ @WebServlet("/DeployManager") public class DeployManager extends HttpServlet { private static final long serialVersionUID = 1L; public static final String DEPLOY_MANAGER_DEFAULT_NAME = "/DeployManager"; // html tags private static final String BEGIN_TAGS = "<tr><td><a name = \""; private static final String NAME_OF_TAGS = "\" href=\"#\"\">"; private static final String END_NAME_TAGS = "</a></td>\n"; private static final String END_TAGS = "</td><td><a href = \"DeployManager\">reload</a></td></tr>"; private static final String REDEPLOY_START_TAG = "<td><a name = \""; private static final String REDEPLOY_TYPE_TAG = "\" href=\"#\" onClick=\"sendRequest(this.name, '"; private static final String REDEPLOY_FILE_TYPE_TAG = "', '"; private static final String REDEPLOY_NAME_TAG = "')\">"; private static final String REDEPLOY_END_TAG = "</a></td>"; private static final String BEGIN_PAGE = StringUtils .concat("<html>\n", "\t<head><script type=\"text/javascript\">\n", "/* <![CDATA[ */\n", "\t\tfunction sendRequest(redeploy, type, fileType){\n ", "\t\t\tvar xmlhttp = new XMLHttpRequest();\n ", "\t\t\tvar reqUrl = \"DeployManager?file=\" + redeploy + \"&type=\" + type + \"&fileType=\" + fileType;\n", "\t\t\txmlhttp.open(\"GET\", reqUrl, true);\n", "\t\t\txmlhttp.send();\n", "}\n", "/* ]]> */\n", "</script>\n", "\t<title>Deployment management</title>", "</head>\n", "\t<body>\n", "\t<table>\n"); private static final String TYPE_TAG = "\t\t<tr><td><br><b>"; private static final String END_TYPE_TAG = "</b></br></td></tr>\n"; private static final String END_PAGE = "</body></table>\n </html>"; private static final String LOGIN_PAGE = StringUtils .concat("<html>\n", "\t\t<head>\n", "\t\t\t<title>Login</title>\n", "\t\t</head>\n", "\t\t<body>\n", "\t\t\t\t\t\t<br><form name = \"ManagementLogin\" method=\"post\">", "\t\t\t\t\t\t\t<br><input type=\"user\" name=\"user\"></br>", "\t\t\t\t\t\t\t<br><input type=\"password\" name=\"password\"></br>", "\t\t\t\t\t\t\t<br><input type=\"submit\" value=\"Submit\"></br>", "\t\t\t\t\t\t</form></br>\n"); private static final String INCORRECT_MESSAGE = "<br><b>invalid user name / passowd</b></br>"; private static final String END_LOGIN_PAGE = "</html>"; private static final String DEPLOYMENTS = "deployments"; private static final String DATA_SOURCES = "datasources"; // HTTP parameters private static final String REDEPLOY_PARAM_NAME = "file"; private static final String TYPE_PARAM_NAME = "type"; private static final String REDEPLOY_TYPE = "redeploy"; private static final String UNDEPLOY_TYPE = "undeploy"; protected static final String FILE_TYPE_PARAMETER_NAME = "fileType"; private static final String APP_DEPLOYMENT_TYPE = "application"; private static final String DTS_DEPLOYMENT_TYPE = "datasource"; private static final String USER_PARAMETER_NAME = "user"; private static final String PASS_PARAMETER_NAME = "password"; private static final String 
DEPLOY_PASS_KEY = "deploy_manager_pass"; // Security for deploy management private Security security; /** * Class to cache authenticated users for {@link DeployManager} servlet page * * @author levan * */ private static class DeployPass implements Serializable { private static final long serialVersionUID = 1L; private String userName; } private String getApplications() { List<File> apps = Watcher.listDeployments(); List<File> dss = Watcher.listDataSources(); StringBuilder builder = new StringBuilder(); builder.append(BEGIN_PAGE); builder.append(TYPE_TAG); builder.append(DEPLOYMENTS); builder.append(END_TYPE_TAG); String tag; if (ObjectUtils.available(apps)) { for (File app : apps) { tag = getTag(app.getPath(), APP_DEPLOYMENT_TYPE); builder.append(tag); } } builder.append(BEGIN_PAGE); builder.append(TYPE_TAG); builder.append(DATA_SOURCES); builder.append(END_TYPE_TAG); if (ObjectUtils.available(dss)) { for (File ds : dss) { tag = getTag(ds.getPath(), DTS_DEPLOYMENT_TYPE); builder.append(tag); } } builder.append(END_PAGE); return builder.toString(); } private void fillDeployType(StringBuilder builder, String app, String type, String fileType) { builder.append(REDEPLOY_START_TAG); builder.append(app); builder.append(REDEPLOY_TYPE_TAG); builder.append(type); builder.append(REDEPLOY_FILE_TYPE_TAG); builder.append(fileType); builder.append(REDEPLOY_NAME_TAG); builder.append(type); builder.append(REDEPLOY_END_TAG); } private String getTag(String app, String fileType) { StringBuilder builder = new StringBuilder(); builder.append(BEGIN_TAGS); builder.append(app); builder.append(NAME_OF_TAGS); builder.append(app); builder.append(END_NAME_TAGS); fillDeployType(builder, app, UNDEPLOY_TYPE, fileType); fillDeployType(builder, app, REDEPLOY_TYPE, fileType); builder.append(END_TAGS); return builder.toString(); } private String toLoginPage(boolean incorrect) { StringBuilder builder = new StringBuilder(); builder.append(LOGIN_PAGE); if (incorrect) { builder.append(INCORRECT_MESSAGE); } builder.append(END_LOGIN_PAGE); return builder.toString(); } private boolean authenticate(String userName, String password, HttpSession session) { boolean valid = security.authenticate(userName, password); if (valid) { DeployPass pass = new DeployPass(); pass.userName = userName; session.setAttribute(DEPLOY_PASS_KEY, pass); } return valid; } private boolean check(HttpSession session) { boolean valid = ObjectUtils.notNull(session); if (valid) { Object pass = session.getAttribute(DEPLOY_PASS_KEY); valid = ObjectUtils.notNull(pass); if (valid) { valid = (pass instanceof DeployPass) && (ObjectUtils.available(((DeployPass) pass).userName)); } else { valid = security.check(); } } return valid; } @Override public void init() throws ServletException { try { security = new Security(); } catch (IOException ex) { } super.init(); } @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { boolean check = check(request.getSession(Boolean.FALSE)); String html; if (check) { String fileName = request.getParameter(REDEPLOY_PARAM_NAME); String type = request.getParameter(TYPE_PARAM_NAME); if (ObjectUtils.available(fileName)) { if (type == null || REDEPLOY_TYPE.equals(type)) { Watcher.redeployFile(fileName); } else if (UNDEPLOY_TYPE.equals(type)) { Watcher.undeployFile(fileName); } } html = getApplications(); } else { html = toLoginPage(Boolean.FALSE); } Writer writer = response.getWriter(); try { writer.write(html); } finally { writer.close(); } } @Override protected void 
doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String userName = request.getParameter(USER_PARAMETER_NAME); String password = request.getParameter(PASS_PARAMETER_NAME); boolean valid = ObjectUtils.available(userName) && ObjectUtils.available(password); if (valid) { valid = authenticate(userName, password, request.getSession(Boolean.TRUE)); } if (valid) { response.sendRedirect("DeployManager"); } else { String html = toLoginPage(Boolean.TRUE); Writer writer = response.getWriter(); try { writer.write(html); } finally { writer.close(); } } } }
improved DeployManager class
src/main/java/org/lightmare/deploy/management/DeployManager.java
improved DeployManager class
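Note on the record above: the page that DeployManager renders wires each redeploy/undeploy link to the sendRequest JavaScript function, which issues a plain GET against the servlet with file, type and fileType query parameters. Below is a minimal client-side sketch of that same request; the host, port and session-cookie handling are illustrative assumptions, only the parameter names come from the servlet itself.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class DeployManagerClient {

    // Triggers a redeploy the same way the generated page's sendRequest() does.
    // The base URL is an assumption; sessionCookie must come from a prior login POST.
    public static int redeploy(String filePath, String fileType, String sessionCookie) throws IOException {
        String query = "file=" + URLEncoder.encode(filePath, "UTF-8")
                + "&type=redeploy&fileType=" + URLEncoder.encode(fileType, "UTF-8");
        HttpURLConnection conn = (HttpURLConnection) new URL(
                "http://localhost:8080/DeployManager?" + query).openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Cookie", sessionCookie);
        int code = conn.getResponseCode(); // 200 means the servlet accepted the request
        conn.disconnect();
        return code;
    }
}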
Java
unlicense
78a8726931332f48d8f4b51f75e467fa5d5348d9
0
wraziens/Trickle
package cornell.trickleapp; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.Iterator; import android.annotation.SuppressLint; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.graphics.drawable.GradientDrawable; import android.graphics.drawable.LayerDrawable; import android.os.Bundle; import android.os.Vibrator; import android.preference.PreferenceManager; import android.text.InputType; import android.view.Gravity; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.widget.EditText; import android.widget.ImageView; import android.widget.Toast; public class DrinkCounter extends Activity { private static FlyOutContainer root; private int drink_count = 0; private DatabaseHandler db; private double bac; private Vibrator click_vibe; private int face_color; private int face_icon; private boolean clicked; private Date start_date; private ArrayList<Date> drinkDates; private ArrayList<DatabaseStore> startDates; private final Double CALORIES_PER_DRINK = 120.0; private final Double CALORIES_HOT_DOG = 250.0; @SuppressLint("NewApi") @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); this.root = (FlyOutContainer) this.getLayoutInflater().inflate( R.layout.drink_tracker, null); this.setContentView(root); click_vibe = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE); clicked=false; db = new DatabaseHandler(this); start(); updateFace(); SharedPreferences getPrefs = PreferenceManager .getDefaultSharedPreferences(getBaseContext()); Boolean checkSurveyed = getPrefs.getBoolean("hints", true); if (checkSurveyed) { Intent openTutorial = new Intent(this, DrinkCounterTutorial.class); startActivity(openTutorial); } } @Override protected void onResume() { super.onResume(); start(); recalculateBac(); updateFace(); clicked = false; } @Override protected void onStop() { db.close(); super.onStop(); finish(); } private void start() { Date date = new Date(); Date delayedDate = DatabaseStore.getDelayedDate(); ArrayList<DatabaseStore> drink_count_vals = (ArrayList<DatabaseStore>) db .getVarValuesDelay("drink_count", new Date()); if(drink_count_vals != null){ drink_count_vals = DatabaseStore.sortByTime(drink_count_vals); //Get the stored StartDates startDates = (ArrayList<DatabaseStore>)db.getVarValuesDelay("start_date", date); if(startDates!= null){ startDates = DatabaseStore.sortByTime(startDates); start_date = DatabaseStore.retrieveDate(startDates.get(startDates.size()-1).value); if(start_date == null){ start_date = delayedDate; } }else{ start_date = delayedDate; db.addDelayValue("start_date", start_date); } drink_count = Integer.parseInt(drink_count_vals.get( drink_count_vals.size()-1).value); recalculateBac(); }else{ //Check to see if residual BAC value from day Prior Date yesterday = DatabaseStore.getYesterday(); ArrayList<DatabaseStore> yesterday_drink_count = (ArrayList<DatabaseStore>) db .getVarValuesDelay("drink_count", yesterday); if(yesterday_drink_count != null){ yesterday_drink_count = 
DatabaseStore.sortByTime(yesterday_drink_count); //Get the stored StartDates for yesterday startDates = (ArrayList<DatabaseStore>)db.getVarValuesDelay("start_date", yesterday); if(startDates!= null){ startDates = DatabaseStore.sortByTime(startDates); start_date = DatabaseStore.retrieveDate(startDates.get(startDates.size()-1).value); if(start_date != null){ drink_count = Integer.parseInt(yesterday_drink_count.get( yesterday_drink_count.size()-1).value); double currentBAC = calculateBac(start_date, delayedDate, drink_count); if (currentBAC > 0){ //Add the start value to the db. db.addDelayValue("start_date", start_date); db.addDelayValue("drink_count", drink_count); db.addDelayValue("bac", String.valueOf(currentBAC)); updateFace(); db.addDelayValue("bac_color", String.valueOf(face_color)); } }else{ //No remaining values from previous day - start fresh start_date = null; drink_count = 0; bac = 0; } } }else{ //No remaining values from previous day - start fresh start_date = null; drink_count = 0; bac = 0; } } } public void removeLast() { drink_count--; Date date = new Date(); ArrayList<DatabaseStore> drink_count_vals = (ArrayList<DatabaseStore>) db .getVarValuesDelay("drink_count", date); drink_count_vals = DatabaseStore.sortByTime(drink_count_vals); ArrayList<String> variables = new ArrayList<String>(); variables.add(drink_count_vals.get( drink_count_vals.size() - 1).variable); variables.add("bac"); variables.add("bac_color"); variables.add("hotdog"); ArrayList<DatabaseStore> hd_val = ((ArrayList<DatabaseStore>)db .getVarValuesForDay("hotdog",date)); if(hd_val!=null){ if(hd_val.size() ==1){ if(Integer.valueOf(hd_val.get(0).value) >0){ db.updateOrAdd("hotdog", Integer.valueOf(hd_val.get(0).value) - 1); } } } if (drink_count_vals.size() == 1) { ArrayList<String> vals = new ArrayList<String>(); vals.add("drank_last_night"); vals.add("tracked"); db.deleteValuesTomorrow(vals); variables.add("drank"); } db.deleteVaribles(variables, drink_count_vals.get(drink_count_vals.size() - 1)); recalculateBac(); updateFace(); Toast.makeText(getApplicationContext(), "Your last drink has been removed", Toast.LENGTH_SHORT).show(); } public static int getBacColor(double bac_value) { if (bac_value < 0.06) { return Color.rgb(112,191, 65); } else if (bac_value < 0.15) { return Color.rgb(245, 211,40); } else if (bac_value < 0.24) { return Color.rgb(236, 93, 87); } else { return Color.DKGRAY; } } public void setFaceIcon(double bac_value){ if (bac_value < 0.06) { face_icon = R.drawable.ic_tracker_smile; } else if (bac_value < 0.15) { face_icon = R.drawable.ic_tracker_neutral; } else if (bac_value < 0.24) { face_icon = R.drawable.ic_tracker_frown; } else { face_icon = R.drawable.ic_tracker_dead; } } private void updateFace(){ setContentView(R.layout.drink_tracker); face_color = getBacColor(bac); setFaceIcon(bac); ImageView face = (ImageView)findViewById(R.id.drink_smile); //Update the face color ((GradientDrawable)((LayerDrawable) face.getDrawable()).getDrawable(0) ).setColor(face_color); //Update the face icon Drawable to_replace = getResources().getDrawable(face_icon); ((LayerDrawable) face.getDrawable()).setDrawableByLayerId( R.id.face_icon, to_replace); face.invalidate(); face.refreshDrawableState(); } private void recalculateBac(){ Date date = DatabaseStore.getDelayedDate(); bac = calculateBac(start_date, date, drink_count); } //Should NOT save anything to DB in this function! 
private double calculateBac(Date start, Date end, int number_drinks) { if(number_drinks <= 0){ return 0.0; } if(start == null){ start(); start = start_date; if(start == null){ return 0.0; } } // get the user's gender ArrayList<DatabaseStore> stored_gender = (ArrayList<DatabaseStore>) db .getAllVarValue("gender"); // If user did not set gender use "Female" as default String gender = "Female"; if (stored_gender != null) { gender = stored_gender.get(0).value; } // fetch the user's weight ArrayList<DatabaseStore> stored_weight = (ArrayList<DatabaseStore>) db .getAllVarValue("weight"); Integer weight_lbs = 120; if (stored_weight != null) { weight_lbs = Integer.parseInt(stored_weight.get(0).value); } double metabolism_constant = 0; double gender_constant = 0; double weight_kilograms = weight_lbs * 0.453592; if (gender.equals("Male")) { metabolism_constant = 0.015; gender_constant = 0.58; } else { metabolism_constant = 0.017; gender_constant = 0.49; } //getTime returns in milliseconds. Divide by 1000 to convert to seconds, 60 to convert // to minutes and 60 to convert to hours. long time_elapsed = (end.getTime()-start.getTime()) / (1000 * 60 * 60); double bac_update = ((0.806 * number_drinks * 1.2) / (gender_constant * weight_kilograms)) - (metabolism_constant * time_elapsed); return bac_update; } @SuppressLint("NewApi") public void hadDrink(View view) { clicked=true; Date date = new Date(); Date delayedDate = DatabaseStore.getDelayedDate(); //First drink of the session, add to db if(start_date == null){ start_date = delayedDate; db.addDelayValue("start_date", start_date); } if(drink_count > 0){ //Get last BAC at current Time double lastBAC = calculateBac(start_date, delayedDate, drink_count); //If BAC is <=0 then it is a new 'drinking session' so reset values if(lastBAC <= 0){ drink_count = 0; start_date = delayedDate; db.addDelayValue("start_date", start_date); } } drink_count++; if (drink_count == 1 ) { db.addValueTomorrow("drank_last_night", "True"); db.updateOrAdd("drank", "True"); } //Add the drink_count to the DB db.addDelayValue("drink_count", drink_count); recalculateBac(); updateFace(); //Add BAC value and BAC color to the database db.addDelayValue("bac", String.valueOf(bac)); db.addDelayValue("bac_color", String.valueOf(face_color)); // calculate the number of hot dogs that equate to the same number of calories Double drink_cals = drink_count * CALORIES_PER_DRINK; int number_hot_dogs = (int) Math.ceil(drink_cals/ CALORIES_HOT_DOG); db.updateOrAdd("hot_dogs", number_hot_dogs); } private void injectDrink(int minutesDelay){ drink_count++; Date date = new Date(); //get the existing values from the DB ArrayList<DatabaseStore> counts_recent = (ArrayList<DatabaseStore>)db.getVarValuesDelay( "drink_count", date); ArrayList<DatabaseStore> bac_recent = (ArrayList<DatabaseStore>)db.getVarValuesDelay( "bac", date); ArrayList<DatabaseStore> colors_recent = (ArrayList<DatabaseStore>)db.getVarValuesDelay( "bac_color", date); //Construct the time where we want to inject GregorianCalendar gc = new GregorianCalendar(); gc.setTime(date); gc.add(Calendar.HOUR_OF_DAY, -6); gc.add(Calendar.MINUTE, minutesDelay*-1); date = gc.getTime(); int value_inject = 0; double bac_inject =0.0; int color_inject = 0; if(counts_recent != null){ //Sort all values by date counts_recent = DatabaseStore.sortByTime(counts_recent); bac_recent = DatabaseStore.sortByTime(bac_recent); colors_recent = DatabaseStore.sortByTime(colors_recent);
assert(bac_recent.size()== counts_recent.size()); assert(colors_recent.size() == counts_recent.size()); boolean placed = false; Iterator<DatabaseStore> iterator = counts_recent.iterator(); Date start_date = counts_recent.get(0).date; int index_val = 0; while (iterator.hasNext()){ DatabaseStore ds = iterator.next(); if(ds.date.after(date)){ if (!placed){ value_inject = Integer.parseInt(ds.value); bac_inject = calculateBac(start_date, date, Integer.parseInt( ds.value)); color_inject = getBacColor(bac_inject); placed=true; } //update drink count Integer new_count_val = Integer.parseInt(ds.value) + 1; ds.value = new_count_val.toString(); db.updateQuestion(ds); //update bac double new_bac = calculateBac(start_date, ds.date, new_count_val); DatabaseStore d = bac_recent.get(index_val); d.value = String.valueOf(new_bac); db.updateQuestion(d); //update Bac Color int new_bac_color = getBacColor(new_bac); d = colors_recent.get(index_val); d.value =String.valueOf(new_bac_color); db.updateQuestion(d); } index_val += 1; } }else{ value_inject = 1; bac_inject = calculateBac(start_date, date, 1); color_inject = getBacColor(bac_inject); } //Add the injected question DatabaseStore data_store = DatabaseStore.DatabaseIntegerStore("drink_count", String.valueOf(value_inject), date); db.addQuestion(data_store); data_store = DatabaseStore.DatabaseIntegerStore("bac", String.valueOf(bac_inject), date); db.addQuestion(data_store); data_store = DatabaseStore.DatabaseIntegerStore("bac_color", String.valueOf(color_inject), date); db.addQuestion(data_store); recalculateBac(); updateFace(); } public void addDrinkHandler(View view){ click_vibe.vibrate(75); Toast t = Toast.makeText(getApplicationContext(), "Adding a drink." , Toast.LENGTH_SHORT); t.setGravity(Gravity.TOP, 0, 100); t.show(); updateFace(); hadDrink(view); } public void removeLastHandler(View view){ click_vibe.vibrate(20); if (clicked == true){ new AlertDialog.Builder(this) .setTitle("Remove Last Drink") .setMessage("Are you sure you would like to remove your last recorded drink?") .setPositiveButton(R.string.yes, new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { clicked=false; removeLast(); } }) .setNegativeButton(R.string.no, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }) .show(); }else{ new AlertDialog.Builder(this) .setTitle("Remove Last Drink") .setMessage("Your last drink was already removed or you did not record any drinks recently.") .setPositiveButton(android.R.string.yes, null).show(); } } public void injectDrinkHandler(View view){ click_vibe.vibrate(20); final String options[] = new String[] {"3 Hours Ago", "2.5 Hours Ago", "2 Hours Ago", "1.5 Hours Ago", "1 Hour Ago", "45 Minutes Ago", "30 Minutes Ago", "15 Minutes Ago"}; final int values[] = new int[] {180, 150, 120, 90, 60, 45, 30, 15}; final Context context = this; new AlertDialog.Builder(this) .setTitle("Record a Drink I had: ") .setItems(options, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { final int v = values[which]; new AlertDialog.Builder(context) .setTitle("Add Drink in Past") .setMessage("Are you sure you would like to record a drink you had "+options[which] +"?") .setPositiveButton(R.string.yes, new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { injectDrink(v); } }) .setNegativeButton(R.string.no, new DialogInterface.OnClickListener() { @Override public 
void onClick(DialogInterface dialog, int which) { } }) .show(); } }) .setNegativeButton(android.R.string.no, null).show(); } public void trackMoney(View view){ click_vibe.vibrate(20); final EditText input = new EditText(this); input.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL); new AlertDialog.Builder(this).setTitle("Enter Amount you spent on Alcohol").setView( input).setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { //Guard against empty or malformed input, which would otherwise crash with a NumberFormatException String entered = input.getText().toString(); try { double value = Double.parseDouble(entered); DecimalFormat formatter = new DecimalFormat("#.##"); db.addDelayValue("money", formatter.format(value)); } catch (NumberFormatException nfe) { Toast.makeText(getApplicationContext(), "Please enter a valid amount", Toast.LENGTH_SHORT).show(); } } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void toggleMenu(View v) { this.root.toggleMenu(); } }
DrinkingApp/src/cornell/trickleapp/DrinkCounter.java
package cornell.trickleapp; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.Iterator; import android.annotation.SuppressLint; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.graphics.drawable.GradientDrawable; import android.graphics.drawable.LayerDrawable; import android.os.Bundle; import android.os.Vibrator; import android.preference.PreferenceManager; import android.text.InputType; import android.view.Gravity; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.widget.EditText; import android.widget.ImageView; import android.widget.Toast; public class DrinkCounter extends Activity { private static FlyOutContainer root; private int drink_count = 0; private DatabaseHandler db; private double bac; private Vibrator click_vibe; private int face_color; private int face_icon; private boolean clicked; private Date start_date; private ArrayList<Date> drinkDates; private ArrayList<DatabaseStore> startDates; private final Double CALORIES_PER_DRINK = 120.0; private final Double CALORIES_HOT_DOG = 250.0; @SuppressLint("NewApi") @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); this.root = (FlyOutContainer) this.getLayoutInflater().inflate( R.layout.drink_tracker, null); this.setContentView(root); click_vibe = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE); clicked=false; db = new DatabaseHandler(this); start(); updateFace(); SharedPreferences getPrefs = PreferenceManager .getDefaultSharedPreferences(getBaseContext()); Boolean checkSurveyed = getPrefs.getBoolean("hints", true); if (checkSurveyed) { Intent openTutorial = new Intent(this, DrinkCounterTutorial.class); startActivity(openTutorial); } } @Override protected void onResume() { super.onResume(); start(); recalculateBac(); updateFace(); clicked = false; } @Override protected void onStop() { db.close(); super.onStop(); finish(); } private void start() { Date date = new Date(); Date delayedDate = DatabaseStore.getDelayedDate(); ArrayList<DatabaseStore> drink_count_vals = (ArrayList<DatabaseStore>) db .getVarValuesDelay("drink_count", new Date()); if(drink_count_vals != null){ Toast.makeText(this, "not null ", Toast.LENGTH_SHORT).show( ); drink_count_vals = DatabaseStore.sortByTime(drink_count_vals); //Get the stored StartDates startDates = (ArrayList<DatabaseStore>)db.getVarValuesDelay("start_date", date); if(startDates!= null){ startDates = DatabaseStore.sortByTime(startDates); start_date = DatabaseStore.retrieveDate(startDates.get(startDates.size()-1).value); if(start_date == null){ start_date = delayedDate; } }else{ start_date = delayedDate; db.addDelayValue("start_date", start_date); } drink_count = Integer.parseInt(drink_count_vals.get( drink_count_vals.size()-1).value); Toast.makeText(this, "drink_count " + drink_count, Toast.LENGTH_SHORT).show(); recalculateBac(); Toast.makeText(this, "bac" + bac, Toast.LENGTH_SHORT).show(); }else{ Toast.makeText(this, "null ", Toast.LENGTH_SHORT).show(); //Check to see if residual BAC value 
from day Prior Date yesterday = DatabaseStore.getYesterday(); ArrayList<DatabaseStore> yesterday_drink_count = (ArrayList<DatabaseStore>) db .getVarValuesDelay("drink_count", yesterday); if(yesterday_drink_count != null){ yesterday_drink_count = DatabaseStore.sortByTime(yesterday_drink_count); //Get the stored StartDates for yesterday startDates = (ArrayList<DatabaseStore>)db.getVarValuesDelay("start_date", yesterday); if(startDates!= null){ startDates = DatabaseStore.sortByTime(startDates); start_date = DatabaseStore.retrieveDate(startDates.get(startDates.size()-1).value); Toast.makeText(this, "num StartDates " + startDates.size(), Toast.LENGTH_SHORT).show(); if(start_date != null){ drink_count = Integer.parseInt(yesterday_drink_count.get( yesterday_drink_count.size()-1).value); Toast.makeText(this, "drink_count" + drink_count, Toast.LENGTH_SHORT).show(); double currentBAC = calculateBac(start_date, delayedDate, drink_count); if (currentBAC > 0){ //Add the start value to the db. db.addDelayValue("start_date", start_date); db.addDelayValue("drink_count", drink_count); db.addDelayValue("bac", String.valueOf(currentBAC)); Toast.makeText(this, "current_bac " + bac, Toast.LENGTH_SHORT).show(); updateFace(); db.addDelayValue("bac_color", String.valueOf(face_color)); } }else{ //No remaining values from previous day - start fresh start_date = null; drink_count = 0; bac = 0; } } }else{ //No remaining values from previous day - start fresh start_date = null; drink_count = 0; bac = 0; } } } public void removeLast() { drink_count--; Date date = new Date(); ArrayList<DatabaseStore> drink_count_vals = (ArrayList<DatabaseStore>) db .getVarValuesDelay("drink_count", date); drink_count_vals = DatabaseStore.sortByTime(drink_count_vals); ArrayList<String> variables = new ArrayList<String>(); variables.add(drink_count_vals.get( drink_count_vals.size() - 1).variable); variables.add("bac"); variables.add("bac_color"); variables.add("hotdog"); ArrayList<DatabaseStore> hd_val = ((ArrayList<DatabaseStore>)db .getVarValuesForDay("hotdog",date)); if(hd_val!=null){ if(hd_val.size() ==1){ if(Integer.valueOf(hd_val.get(0).value) >0){ db.updateOrAdd("hotdog", Integer.valueOf(hd_val.get(0).value) - 1); } } } if (drink_count_vals.size() == 1) { ArrayList<String> vals = new ArrayList<String>(); vals.add("drank_last_night"); vals.add("tracked"); db.deleteValuesTomorrow(vals); variables.add("drank"); } db.deleteVaribles(variables, drink_count_vals.get(drink_count_vals.size() - 1)); recalculateBac(); updateFace(); Toast.makeText(getApplicationContext(), "Your last drink has been removed", Toast.LENGTH_SHORT).show(); } public static int getBacColor(double bac_value) { if (bac_value < 0.06) { return Color.rgb(112,191, 65); } else if (bac_value < 0.15) { return Color.rgb(245, 211,40); } else if (bac_value < 0.24) { return Color.rgb(236, 93, 87); } else { return Color.DKGRAY; } } public void setFaceIcon(double bac_value){ if (bac_value < 0.06) { face_icon = R.drawable.ic_tracker_smile; } else if (bac_value < 0.15) { face_icon = R.drawable.ic_tracker_neutral; } else if (bac_value < 0.24) { face_icon = R.drawable.ic_tracker_frown; } else { face_icon = R.drawable.ic_tracker_dead; } } private void updateFace(){ setContentView(R.layout.drink_tracker); face_color = getBacColor(bac); setFaceIcon(bac); ImageView face = (ImageView)findViewById(R.id.drink_smile); //Update the face color ((GradientDrawable)((LayerDrawable) face.getDrawable()).getDrawable(0) ).setColor(face_color); //Update the face icon Drawable to_replace = 
getResources().getDrawable(face_icon); ((LayerDrawable) face.getDrawable()).setDrawableByLayerId( R.id.face_icon, to_replace); face.invalidate(); face.refreshDrawableState(); } private void recalculateBac(){ Date date = DatabaseStore.getDelayedDate(); bac = calculateBac(start_date, date, drink_count); } //Should NOT save anything to DB in this function! private double calculateBac(Date start, Date end, int number_drinks) { if(number_drinks <= 0){ return 0.0; } if(start == null){ start(); } // get the users gender ArrayList<DatabaseStore> stored_gender = (ArrayList<DatabaseStore>) db .getAllVarValue("gender"); // If user did not set gender use "Female" as default String gender = "Female"; if (stored_gender != null) { gender = stored_gender.get(0).value; } // fetch the users weight ArrayList<DatabaseStore> stored_weight = (ArrayList<DatabaseStore>) db .getAllVarValue("weight"); Integer weight_lbs = 120; if (stored_weight != null) { weight_lbs = Integer.parseInt(stored_weight.get(0).value); } double metabolism_constant = 0; double gender_constant = 0; double weight_kilograms = weight_lbs * 0.453592; if (gender.equals("Male")) { metabolism_constant = 0.015; gender_constant = 0.58; } else { metabolism_constant = 0.017; gender_constant = 0.49; } //getTime returns in milliseconds. Divide by 1000 to convert to seconds, 60 to convert // to minutes and 60 to convert to hours. long time_elapsed = (end.getTime()-start.getTime()) / (1000 * 60 * 60); if (number_drinks > 1){ double lastBac = ((0.806 * (number_drinks - 1) * 1.2) / (gender_constant * weight_kilograms)) - (metabolism_constant * time_elapsed); } double bac_update = ((0.806 * number_drinks * 1.2) / (gender_constant * weight_kilograms)) - (metabolism_constant * time_elapsed); return bac_update; } @SuppressLint("NewApi") public void hadDrink(View view) { clicked=true; Date date = new Date(); Date delayedDate = DatabaseStore.getDelayedDate(); //First drink of the session, add to db if(start_date == null){ start_date = delayedDate; db.addDelayValue("start_date", start_date); } if(drink_count > 0){ //Get last BAC at current Time double lastBAC = calculateBac(start_date, delayedDate, drink_count); //If BAC is <=0 then it is a new 'drinking session' so reset values if(lastBAC <= 0){ drink_count = 0; Toast.makeText(this, "reset " + lastBAC, Toast.LENGTH_SHORT).show(); start_date = delayedDate; db.addDelayValue("start_date", start_date); } } drink_count++; if (drink_count == 1 ) { db.addValueTomorrow("drank_last_night", "True"); db.updateOrAdd("drank", "True"); } //Add the drink_count to the DB db.addDelayValue("drink_count", drink_count); recalculateBac(); Toast.makeText(this, "bac " + bac, Toast.LENGTH_SHORT).show(); Toast.makeText(this, "drink_count " + drink_count, Toast.LENGTH_SHORT).show(); //start(); updateFace(); //Add BAC value and BAC color to the database db.addDelayValue("bac", String.valueOf(bac)); db.addDelayValue("bac_color", String.valueOf(face_color)); // calculate number of hot dogs that equate the number of calories Double drink_cals = drink_count * CALORIES_PER_DRINK; int number_hot_dogs = (int) Math.ceil(drink_cals/ CALORIES_HOT_DOG); db.updateOrAdd("hot_dogs", number_hot_dogs); } private void injectDrink(int minutesDelay){ drink_count++; Date date = new Date(); //get the existing values from the DB ArrayList<DatabaseStore> counts_recent = (ArrayList<DatabaseStore>)db.getVarValuesDelay( "drink_count", date); ArrayList<DatabaseStore> bac_recent = (ArrayList<DatabaseStore>)db.getVarValuesDelay( "bac", date); ArrayList<DatabaseStore> 
colors_recent = (ArrayList<DatabaseStore>)db.getVarValuesDelay( "bac_color", date); //Construct the time where we want to inject GregorianCalendar gc = new GregorianCalendar(); gc.setTime(date); gc.add(Calendar.HOUR_OF_DAY, -6); gc.add(Calendar.MINUTE, minutesDelay*-1); date = gc.getTime(); int value_inject = 0; double bac_inject =0.0; int color_inject = 0; if(counts_recent != null){ //Sort all values by date counts_recent = DatabaseStore.sortByTime(counts_recent); bac_recent = DatabaseStore.sortByTime(bac_recent); colors_recent = DatabaseStore.sortByTime(colors_recent); assert(bac_recent.size()== counts_recent.size()); assert(colors_recent.size() == counts_recent.size()); boolean placed = false; Iterator<DatabaseStore> iterator = counts_recent.iterator(); Date start_date = counts_recent.get(0).date; int index_val = 0; while (iterator.hasNext()){ DatabaseStore ds = iterator.next(); if(ds.date.after(date)){ if (!placed){ value_inject = Integer.parseInt(ds.value); bac_inject = calculateBac(start_date, date, Integer.parseInt( ds.value)); color_inject = getBacColor(bac_inject); placed=true; } //update drink count Integer new_count_val = Integer.parseInt(ds.value) + 1; ds.value = new_count_val.toString(); db.updateQuestion(ds); //update bac double new_bac = calculateBac(start_date, ds.date, new_count_val); DatabaseStore d = bac_recent.get(index_val); d.value = String.valueOf(new_bac); db.updateQuestion(d); //update Bac Color int new_bac_color = getBacColor(new_bac); d = colors_recent.get(index_val); d.value =String.valueOf(new_bac_color); db.updateQuestion(d); } index_val += 1; } }else{ value_inject = 1; bac_inject = calculateBac(start_date, date, 1); color_inject = getBacColor(bac_inject); } //Add the injected question DatabaseStore data_store = DatabaseStore.DatabaseIntegerStore("drink_count", String.valueOf(value_inject), date); db.addQuestion(data_store); data_store = DatabaseStore.DatabaseIntegerStore("bac", String.valueOf(bac_inject), date); db.addQuestion(data_store); data_store = DatabaseStore.DatabaseIntegerStore("bac_color", String.valueOf(color_inject), date); db.addQuestion(data_store); recalculateBac(); updateFace(); } public void addDrinkHandler(View view){ click_vibe.vibrate(75); Toast t = Toast.makeText(getApplicationContext(), "Adding a drink." 
, Toast.LENGTH_SHORT); t.setGravity(Gravity.TOP, 0, 100); t.show(); updateFace(); hadDrink(view); } public void removeLastHandler(View view){ click_vibe.vibrate(20); if (clicked == true){ new AlertDialog.Builder(this) .setTitle("Remove Last Drink") .setMessage("Are you sure you would like to remove your last recorded drink?") .setPositiveButton(R.string.yes, new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { clicked=false; removeLast(); } }) .setNegativeButton(R.string.no, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }) .show(); }else{ new AlertDialog.Builder(this) .setTitle("Remove Last Drink") .setMessage("Your last drink was already removed or you did not record any drinks recently.") .setPositiveButton(android.R.string.yes, null).show(); } } public void injectDrinkHandler(View view){ click_vibe.vibrate(20); final String options[] = new String[] {"3 Hours Ago", "2.5 Hours Ago", "2 Hours Ago", "1.5 Hours Ago", "1 Hour Ago", "45 Minutes Ago", "30 Minutes Ago", "15 Minutes Ago"}; final int values[] = new int[] {180, 150, 120, 90, 60, 45, 30, 15}; final Context context = this; new AlertDialog.Builder(this) .setTitle("Record a Drink I had: ") .setItems(options, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { final int v = values[which]; new AlertDialog.Builder(context) .setTitle("Add Drink in Past") .setMessage("Are you sure you would like to record a drink you had "+options[which] +"?") .setPositiveButton(R.string.yes, new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { injectDrink(v); } }) .setNegativeButton(R.string.no, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }) .show(); } }) .setNegativeButton(android.R.string.no, null).show(); } public void trackMoney(View view){ click_vibe.vibrate(20); final EditText input = new EditText(this); input.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL); new AlertDialog.Builder(this).setTitle("Enter Amount you spent on Alcohol").setView( input).setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which) { double value = Double.parseDouble(input.getText().toString()); DecimalFormat formatter = new DecimalFormat("#.##"); db.addDelayValue("money", formatter.format(value)); } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }) .show(); } public void toggleMenu(View v) { this.root.toggleMenu(); } }
Remove debug toasts
DrinkingApp/src/cornell/trickleapp/DrinkCounter.java
Remove debug toasts
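For reference, calculateBac in the record above applies a Widmark-style estimate. The sketch below reproduces the same arithmetic with the constants taken from the code; the sample drink count, weight and elapsed time are invented for illustration, and it keeps fractional hours where the original truncates to whole hours via long division.

public class BacEstimate {

    // Mirrors DrinkCounter.calculateBac: 0.806 and 1.2 scale standard drinks,
    // 0.58/0.49 are the male/female body-water constants, 0.015/0.017 the
    // per-hour elimination rates, and 0.453592 converts lbs to kg.
    public static double estimate(int drinks, double weightLbs, boolean male, double hoursElapsed) {
        double metabolism = male ? 0.015 : 0.017;
        double genderConstant = male ? 0.58 : 0.49;
        double weightKg = weightLbs * 0.453592;
        return ((0.806 * drinks * 1.2) / (genderConstant * weightKg))
                - (metabolism * hoursElapsed);
    }

    public static void main(String[] args) {
        // e.g. 3 drinks over 2 hours for a 160 lb male -> roughly 0.039
        System.out.printf("BAC ~ %.3f%n", estimate(3, 160, true, 2));
    }
}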
Java
apache-2.0
1f53bcaa48746b19371d7b02a5d1b156c20cea94
0
tapglue/android_sdk,tapglue/android_sdk
/* * Copyright (c) 2015-2016 Tapglue (https://www.tapglue.com/). All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tapglue.android.entities; import com.google.gson.annotations.SerializedName; import java.util.List; import java.util.Map; public class Post { private String id; @SerializedName("visibility") private int visibility; @SerializedName("user_id") private String userId; private List<String> tags; private List<Attachment> attachments; private Counts counts; @SerializedName("created_at") private String createdAt; @SerializedName("updated_at") private String updatedAt; @SerializedName("is_liked") private boolean isLiked; private User user; public Post(List<Attachment> attachments, Visibility visibility) { this.attachments = attachments; this.visibility = visibility.getVisibility(); } public Visibility getVisibility() { return Visibility.convert(visibility); } public String getId() { return id; } public String getUserId() { return userId; } public User getUser() { return user; } public List<String> getTags() { return tags; } public void setTags(List<String> tags) { this.tags = tags; } public List<Attachment> getAttachments() { return attachments; } public Counts getCounts() { return counts; } public String getCreatedAt() { return createdAt; } public String getUpdatedAt() { return updatedAt; } public boolean isLiked() { return isLiked; } public void setUser(User user) { this.user = user; } public enum Visibility { PRIVATE(10), CONNECTION(20), PUBLIC(30); private int rawVisibility; Visibility(int visibility) { this.rawVisibility = visibility; } private static Visibility convert(int raw) { switch(raw) { case 10: return PRIVATE; case 20: return CONNECTION; case 30: return PUBLIC; default: throw new IllegalArgumentException(); } } private int getVisibility() { return rawVisibility; } } public static class Attachment { private Map<String, String> contents; private Type type; private String name; public Attachment(Map<String, String> contents, Type type, String name) { this.contents = contents; this.type = type; this.name = name; } public enum Type { @SerializedName("text") TEXT, @SerializedName("url") URL } public Map<String, String> getContents() { return contents; } public Type getType() { return type; } public String getName() { return name; } } public static class Counts { private long comments; private long likes; public long getLikes() { return likes; } public long getComments() { return comments; } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Post post = (Post) o; return id != null ? id.equals(post.id) : post.id == null; } @Override public int hashCode() { return id != null ? id.hashCode() : 0; } }
tapglue-android-sdk/src/main/java/com/tapglue/android/entities/Post.java
/* * Copyright (c) 2015-2016 Tapglue (https://www.tapglue.com/). All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tapglue.android.entities; import com.google.gson.annotations.SerializedName; import java.util.List; import java.util.Map; public class Post { private String id; @SerializedName("visibility") private int visibility; @SerializedName("user_id") private String userId; private List<String> tags; private List<Attachment> attachments; private Counts counts; private String createdAt; private String updatedAt; @SerializedName("is_liked") private boolean isLiked; private User user; public Post(List<Attachment> attachments, Visibility visibility) { this.attachments = attachments; this.visibility = visibility.getVisibility(); } public Visibility getVisibility() { return Visibility.convert(visibility); } public String getId() { return id; } public String getUserId() { return userId; } public User getUser() { return user; } public List<String> getTags() { return tags; } public void setTags(List<String> tags) { this.tags = tags; } public List<Attachment> getAttachments() { return attachments; } public Counts getCounts() { return counts; } public String getCreatedAt() { return createdAt; } public String getUpdatedAt() { return updatedAt; } public boolean isLiked() { return isLiked; } public void setUser(User user) { this.user = user; } public enum Visibility { PRIVATE(10), CONNECTION(20), PUBLIC(30); private int rawVisibility; Visibility(int visibility) { this.rawVisibility = visibility; } private static Visibility convert(int raw) { switch(raw) { case 10: return PRIVATE; case 20: return CONNECTION; case 30: return PUBLIC; default: throw new IllegalArgumentException(); } } private int getVisibility() { return rawVisibility; } } public static class Attachment { private Map<String, String> contents; private Type type; private String name; public Attachment(Map<String, String> contents, Type type, String name) { this.contents = contents; this.type = type; this.name = name; } public enum Type { @SerializedName("text") TEXT, @SerializedName("url") URL } public Map<String, String> getContents() { return contents; } public Type getType() { return type; } public String getName() { return name; } } public static class Counts { private long comments; private long likes; public long getLikes() { return likes; } public long getComments() { return comments; } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Post post = (Post) o; return id != null ? id.equals(post.id) : post.id == null; } @Override public int hashCode() { return id != null ? id.hashCode() : 0; } }
Fix updatedAt not correctly parsed
tapglue-android-sdk/src/main/java/com/tapglue/android/entities/Post.java
Fix updatedAt not correctly parsed
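The fix in the record above adds @SerializedName("created_at") and @SerializedName("updated_at") so Gson can map the server's snake_case keys onto the camelCase fields. A minimal sketch of that mechanism; the JSON sample is invented for illustration.

import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;

public class SerializedNameDemo {

    static class Timestamps {
        @SerializedName("created_at") String createdAt;
        @SerializedName("updated_at") String updatedAt;
    }

    public static void main(String[] args) {
        String json = "{\"created_at\":\"2016-01-01T00:00:00Z\",\"updated_at\":\"2016-02-01T00:00:00Z\"}";
        Timestamps t = new Gson().fromJson(json, Timestamps.class);
        // Without the annotations Gson looks for fields literally named
        // "created_at"/"updated_at" and leaves createdAt/updatedAt null.
        System.out.println(t.createdAt + " / " + t.updatedAt);
    }
}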
Java
apache-2.0
1517b93d2b02314e293f46fe9d7a543a8d0699e5
0
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
package org.apache.solr.common.luke; /** * @version $Id: AdminHandlers.java 608150 2008-01-02 17:15:30Z ryan $ * @since solr 1.3 */ public enum FieldFlag { INDEXED('I', "Indexed"), TOKENIZED('T', "Tokenized"), STORED('S', "Stored"), MULTI_VALUED('M', "Multivalued"), TERM_VECTOR_STORED('V', "TermVector Stored"), TERM_VECTOR_OFFSET('o', "Store Offset With TermVector"), TERM_VECTOR_POSITION('p', "Store Position With TermVector"), OMIT_NORMS('O', "Omit Norms"), LAZY('L', "Lazy"), BINARY('B', "Binary"), COMPRESSED('C', "Compressed"), SORT_MISSING_FIRST('f', "Sort Missing First"), SORT_MISSING_LAST('l', "Sort Missing Last"); private final char abbreviation; private final String display; FieldFlag(char abbreviation, String display) { this.abbreviation = abbreviation; this.display = display; this.display.intern();//QUESTION: Need we bother here? } public static FieldFlag getFlag(char abbrev){ FieldFlag result = null; FieldFlag [] vals = FieldFlag.values(); for (int i = 0; i < vals.length; i++) { if (vals[i].getAbbreviation() == abbrev){ result = vals[i]; break; } } return result; } public char getAbbreviation() { return abbreviation; } public String getDisplay() { return display; } }
src/java/org/apache/solr/common/luke/FieldFlag.java
package org.apache.solr.common.luke; /** * The FieldFlag class is used to store * **/ public enum FieldFlag { INDEXED('I', "Indexed"), TOKENIZED('T', "Tokenized"), STORED('S', "Stored"), MULTI_VALUED('M', "Multivalued"), TERM_VECTOR_STORED('V', "TermVector Stored"), TERM_VECTOR_OFFSET('o', "Store Offset With TermVector"), TERM_VECTOR_POSITION('p', "Store Position With TermVector"), OMIT_NORMS('O', "Omit Norms"), LAZY('L', "Lazy"), BINARY('B', "Binary"), COMPRESSED('C', "Compressed"), SORT_MISSING_FIRST('f', "Sort Missing First"), SORT_MISSING_LAST('l', "Sort Missing Last"); private char abbreviation; private String display; FieldFlag(char abbreviation, String display) { this.abbreviation = abbreviation; this.display = display; this.display.intern();//QUESTION: Need we bother here? } public static FieldFlag getFlag(char abbrev){ FieldFlag result = null; FieldFlag [] vals = FieldFlag.values(); for (int i = 0; i < vals.length; i++) { if (vals[i].getAbbreviation() == abbrev){ result = vals[i]; break; } } return result; } public char getAbbreviation() { return abbreviation; } public String getDisplay() { return display; } }
SOLR-359 -- changing formatting (2 spaces, each enum value on a single line) git-svn-id: 3b1ff1236863b4d63a22e4dae568675c2e247730@608152 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/solr/common/luke/FieldFlag.java
SOLR-359 -- changing formatting (2 spaces, each enum value on a single line)
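A short usage sketch for the enum in the record above, showing the abbreviation round-trip and the null result for an unknown character:

import org.apache.solr.common.luke.FieldFlag;

public class FieldFlagDemo {
    public static void main(String[] args) {
        FieldFlag flag = FieldFlag.getFlag('I');
        System.out.println(flag.getDisplay());      // Indexed
        System.out.println(flag.getAbbreviation()); // I
        System.out.println(FieldFlag.getFlag('z')); // null, getFlag does not throw
    }
}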
Java
apache-2.0
563fc5355025cfc31a364f104de8098476f82782
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.execution.ui; import com.intellij.openapi.ui.panel.ComponentPanelBuilder; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBDimension; import com.intellij.util.ui.JBUI; import javax.swing.*; import java.awt.*; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.List; public class CommandLinePanel extends JPanel { private final List<JComponent> myComponents; private final JLabel myHintLabel; private int myLastWidth; public CommandLinePanel(Collection<? extends SettingsEditorFragment<?,?>> fragments) { super(); myComponents = ContainerUtil.map(fragments, fragment -> fragment.createEditor()); myHintLabel = ComponentPanelBuilder.createNonWrappingCommentComponent(""); FragmentHintManager manager = new FragmentHintManager(s -> myHintLabel.setText(s), null); manager.registerFragments(fragments); setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); setMinimumSize(new Dimension(500, 30)); buildRows(); addComponentListener(new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { buildRows(); } }); } public void rebuildRows() { myLastWidth = -1; buildRows(); } private void buildRows() { int parentWidth = Math.max(getWidth(), getMinimumSize().width); if (myLastWidth == parentWidth) return; myLastWidth = parentWidth; removeAll(); JPanel row = new JPanel(new GridBagLayout()); int rowWidth = 0; GridBagConstraints c = new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, JBUI.emptyInsets(), 0, 0); for (JComponent component : myComponents) { if (!component.isVisible()) continue; int minWidth = component.getMinimumSize().width; if (rowWidth + minWidth > parentWidth) { add(row); add(Box.createVerticalStrut(FragmentedSettingsBuilder.TOP_INSET)); row = new JPanel(new GridBagLayout()); rowWidth = 0; c.gridx = 0; } row.add(component, c.clone()); c.gridx++; rowWidth += minWidth; } add(row); JPanel panel = new JPanel(new BorderLayout()); panel.add(myHintLabel, BorderLayout.WEST); JBDimension size = new JBDimension(100, 20); panel.setMinimumSize(size); panel.setPreferredSize(size); panel.setBorder(JBUI.Borders.emptyLeft(getLeftInset())); add(panel); } public int getLeftInset() { return Arrays.stream(getComponents()).map(component -> FragmentedSettingsBuilder .getLeftInset((JComponent)component)).max(Comparator.comparingInt(o -> o)) .orElse(0); } public static void setMinimumWidth(Component component, int width) { Dimension size = new Dimension(width, component.getMinimumSize().height); component.setMinimumSize(size); component.setPreferredSize(size); } }
platform/platform-api/src/com/intellij/execution/ui/CommandLinePanel.java
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.execution.ui; import com.intellij.openapi.ui.panel.ComponentPanelBuilder; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import javax.swing.*; import java.awt.*; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.List; public class CommandLinePanel extends JPanel { private final List<JComponent> myComponents; private final JLabel myHintLabel; private int myLastWidth; public CommandLinePanel(Collection<? extends SettingsEditorFragment<?,?>> fragments) { super(); myComponents = ContainerUtil.map(fragments, fragment -> fragment.createEditor()); myHintLabel = ComponentPanelBuilder.createNonWrappingCommentComponent(""); FragmentHintManager manager = new FragmentHintManager(s -> myHintLabel.setText(s), null); manager.registerFragments(fragments); setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); setMinimumSize(new Dimension(500, 30)); buildRows(); addComponentListener(new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { buildRows(); } }); } public void rebuildRows() { myLastWidth = -1; buildRows(); } private void buildRows() { int parentWidth = Math.max(getWidth(), getMinimumSize().width); if (myLastWidth == parentWidth) return; myLastWidth = parentWidth; removeAll(); JPanel row = new JPanel(new GridBagLayout()); int rowWidth = 0; GridBagConstraints c = new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, JBUI.emptyInsets(), 0, 0); for (JComponent component : myComponents) { if (!component.isVisible()) continue; int minWidth = component.getMinimumSize().width; if (rowWidth + minWidth > parentWidth) { add(row); add(Box.createVerticalStrut(FragmentedSettingsBuilder.TOP_INSET)); row = new JPanel(new GridBagLayout()); rowWidth = 0; c.gridx = 0; } row.add(component, c.clone()); c.gridx++; rowWidth += minWidth; } add(row); JPanel panel = new JPanel(new BorderLayout()); panel.add(myHintLabel, BorderLayout.WEST); Dimension size = new Dimension(100, 20); panel.setMinimumSize(size); panel.setPreferredSize(size); panel.setBorder(JBUI.Borders.emptyLeft(getLeftInset())); add(panel); } public int getLeftInset() { return Arrays.stream(getComponents()).map(component -> FragmentedSettingsBuilder .getLeftInset((JComponent)component)).max(Comparator.comparingInt(o -> o)) .orElse(0); } public static void setMinimumWidth(Component component, int width) { Dimension size = new Dimension(width, component.getMinimumSize().height); component.setMinimumSize(size); component.setPreferredSize(size); } }
IDEA-242925 [new run config UI] please add tooltips to the fields without labels GitOrigin-RevId: 387f0ccc355ee557ce46b9084e3f170f54e7ab7b
platform/platform-api/src/com/intellij/execution/ui/CommandLinePanel.java
IDEA-242925 [new run config UI] please add tooltips to the fields without labels
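The functional change in the record above swaps a raw java.awt.Dimension for JBDimension when sizing the hint panel. A sketch of the distinction, assuming JBDimension's usual behavior of applying the IDE's user scale factor at construction time (the 200% figures are illustrative):

import com.intellij.util.ui.JBDimension;
import java.awt.Dimension;

class ScalingNote {
    void sizes() {
        Dimension raw = new Dimension(100, 20);      // fixed pixels, too small on HiDPI screens
        Dimension scaled = new JBDimension(100, 20); // at 200% user scale this reports 200x40
    }
}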
Java
apache-2.0
554339053127c86823b8d391a7672b698bdde4b8
0
dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android
package org.commcare.android.tasks; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.database.Cursor; import android.net.http.AndroidHttpClient; import android.util.Log; import net.sqlcipher.database.SQLiteDatabase; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.conn.ConnectTimeoutException; import org.commcare.android.crypt.CryptUtil; import org.commcare.android.database.SqlStorage; import org.commcare.android.database.app.models.UserKeyRecord; import org.commcare.android.database.user.models.ACase; import org.commcare.suite.model.User; import org.commcare.android.javarosa.AndroidLogger; import org.commcare.android.net.HttpRequestGenerator; import org.commcare.android.tasks.templates.CommCareTask; import org.commcare.android.util.AndroidStreamUtil; import org.commcare.android.util.AndroidStreamUtil.StreamReadObserver; import org.commcare.android.util.CommCareUtil; import org.commcare.android.util.SessionUnavailableException; import org.commcare.android.util.bitcache.BitCache; import org.commcare.android.util.bitcache.BitCacheFactory; import org.commcare.cases.ledger.Ledger; import org.commcare.cases.ledger.LedgerPurgeFilter; import org.commcare.cases.util.CasePurgeFilter; import org.commcare.dalvik.application.CommCareApp; import org.commcare.dalvik.application.CommCareApplication; import org.commcare.dalvik.odk.provider.FormsProviderAPI.FormsColumns; import org.commcare.dalvik.services.CommCareSessionService; import org.commcare.data.xml.DataModelPullParser; import org.commcare.resources.model.CommCareOTARestoreListener; import org.commcare.xml.AndroidTransactionParserFactory; import org.javarosa.core.model.condition.EvaluationContext; import org.javarosa.core.model.instance.AbstractTreeElement; import org.javarosa.core.model.instance.DataInstance; import org.javarosa.core.model.instance.TreeReference; import org.javarosa.core.reference.InvalidReferenceException; import org.javarosa.core.reference.ReferenceManager; import org.javarosa.core.services.Logger; import org.javarosa.core.services.storage.IStorageIterator; import org.javarosa.core.services.storage.StorageFullException; import org.javarosa.core.util.PropertyUtils; import org.javarosa.model.xform.XPathReference; import org.javarosa.xml.util.InvalidStructureException; import org.javarosa.xml.util.UnfullfilledRequirementsException; import org.xmlpull.v1.XmlPullParserException; import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.util.Date; import java.util.Hashtable; import java.util.NoSuchElementException; import java.util.Vector; import javax.crypto.SecretKey; /** * @author ctsims */ public abstract class DataPullTask<R> extends CommCareTask<Void, Integer, Integer, R> implements CommCareOTARestoreListener { String server; String keyProvider; String username; String password; Context c; int mCurrentProgress = -1; int mTotalItems = -1; long mSyncStartTime; private boolean wasKeyLoggedIn = false; public static final int DATA_PULL_TASK_ID = 10; public static final int DOWNLOAD_SUCCESS = 0; public static final int AUTH_FAILED = 1; public static final int BAD_DATA = 2; public static final int UNKNOWN_FAILURE = 4; public static final int UNREACHABLE_HOST = 8; 
public static final int CONNECTION_TIMEOUT = 16; public static final int SERVER_ERROR = 32; public static final int PROGRESS_STARTED = 0; public static final int PROGRESS_CLEANED = 1; public static final int PROGRESS_AUTHED = 2; public static final int PROGRESS_DONE= 4; public static final int PROGRESS_RECOVERY_NEEDED= 8; public static final int PROGRESS_RECOVERY_STARTED= 16; public static final int PROGRESS_RECOVERY_FAIL_SAFE = 32; public static final int PROGRESS_RECOVERY_FAIL_BAD = 64; public static final int PROGRESS_PROCESSING = 128; public static final int PROGRESS_DOWNLOADING = 256; /** * Whether to enable loading this data from a local asset for * debug/testing. * * This flag should never be set to true on a prod build or in VC * TODO: It should be an error for "debuggable" to be off and this flag * to be true */ private static final boolean DEBUG_LOAD_FROM_LOCAL = false; private InputStream mDebugStream; public DataPullTask(String username, String password, String server, String keyProvider, Context c) { this.server = server; this.keyProvider = keyProvider; this.username = username; this.password = password; this.c = c; this.taskId = DATA_PULL_TASK_ID; TAG = DataPullTask.class.getSimpleName(); } @Override protected void onCancelled() { super.onCancelled(); if(wasKeyLoggedIn) { CommCareApplication._().releaseUserResourcesAndServices(); } } @Override protected Integer doTaskBackground(Void... params) { // Don't try to sync if logging out is occurring if (!CommCareSessionService.sessionAliveLock.tryLock()) { // TODO PLM: once this task is refactored into manageable // components, it should use the ManagedAsyncTask pattern of // checking for isCancelled() and aborting at safe places. return UNKNOWN_FAILURE; } // Wrap in a 'try' to enable a 'finally' close that releases the // sessionAliveLock. try { publishProgress(PROGRESS_STARTED); CommCareApp app = CommCareApplication._().getCurrentApp(); SharedPreferences prefs = app.getAppPreferences(); String keyServer = prefs.getString("key_server", null); mTotalItems = -1; mCurrentProgress = -1; //Whether or not we should be generating the first key boolean useExternalKeys = !(keyServer == null || keyServer.equals("")); boolean loginNeeded = true; boolean useRequestFlags = false; try { loginNeeded = !CommCareApplication._().getSession().isActive(); } catch(SessionUnavailableException sue) { //expected if we aren't initialized.
} int responseError = UNKNOWN_FAILURE; //This should be per _user_, not per app prefs.edit().putLong("last-ota-restore", new Date().getTime()).commit(); HttpRequestGenerator requestor = new HttpRequestGenerator(username, password); AndroidTransactionParserFactory factory = new AndroidTransactionParserFactory(c, requestor) { boolean publishedAuth = false; @Override public void reportProgress(int progress) { if(!publishedAuth) { DataPullTask.this.publishProgress(PROGRESS_AUTHED,progress); publishedAuth = true; } } }; Logger.log(AndroidLogger.TYPE_USER, "Starting Sync"); long bytesRead = -1; UserKeyRecord ukr = null; try { //This is a dangerous way to do this (the null settings), should revisit later if(loginNeeded) { if(!useExternalKeys) { //Get the key SecretKey newKey = CryptUtil.generateSemiRandomKey(); if(newKey == null) { this.publishProgress(PROGRESS_DONE); return UNKNOWN_FAILURE; } String sandboxId = PropertyUtils.genUUID().replace("-", ""); ukr = new UserKeyRecord(username, UserKeyRecord.generatePwdHash(password), CryptUtil.wrapKey(newKey.getEncoded(),password), new Date(), new Date(Long.MAX_VALUE), sandboxId); } else { ukr = ManageKeyRecordTask.getCurrentValidRecord(app, username, password, true); if(ukr == null) { Logger.log(AndroidLogger.TYPE_ERROR_ASSERTION, "Shouldn't be able to not have a valid key record when OTA restoring with a key server"); this.publishProgress(PROGRESS_DONE); return UNKNOWN_FAILURE; } } //add to transaction parser factory byte[] wrappedKey = CryptUtil.wrapKey(ukr.getEncryptedKey(),password); factory.initUserParser(wrappedKey); } else { factory.initUserParser(CommCareApplication._().getSession().getLoggedInUser().getWrappedKey()); //Only purge cases if we already had a logged in user. Otherwise we probably can't read the DB. purgeCases(); useRequestFlags = true; } //Either way, don't re-do this step this.publishProgress(PROGRESS_CLEANED); int responseCode = -1; HttpResponse response = null; //This isn't awesome, but it's hard to work this in in a cleaner way if(DEBUG_LOAD_FROM_LOCAL) { try { mDebugStream = ReferenceManager._().DeriveReference("jr://asset/payload.xml").getStream(); } catch(InvalidReferenceException ire) { throw new IOException("No payload available at jr://asset/payload.xml"); } responseCode = 200; } else { response = requestor.makeCaseFetchRequest(server, useRequestFlags); responseCode = response.getStatusLine().getStatusCode(); } Logger.log(AndroidLogger.TYPE_USER, "Request opened. Response code: " + responseCode); if(responseCode == 401) { //If we logged in, we need to drop those credentials if(loginNeeded) { CommCareApplication._().releaseUserResourcesAndServices(); } Logger.log(AndroidLogger.TYPE_USER, "Bad Auth Request for user!|" + username); return AUTH_FAILED; } else if(responseCode >= 200 && responseCode < 300) { if(loginNeeded) { //This is necessary (currently) to make sure that data //is encoded. Probably a better way to do this. 
CommCareApplication._().startUserSession(CryptUtil.unWrapKey(ukr.getEncryptedKey(), password), ukr); wasKeyLoggedIn = true; } this.publishProgress(PROGRESS_AUTHED,0); Logger.log(AndroidLogger.TYPE_USER, "Remote Auth Successful|" + username); try { BitCache cache = writeResponseToCache(response); InputStream cacheIn = cache.retrieveCache(); String syncToken = readInput(cacheIn, factory); updateUserSyncToken(syncToken); //record when we last synced Editor e = prefs.edit(); e.putLong("last-succesful-sync", new Date().getTime()); e.commit(); if(loginNeeded) { CommCareApplication._().getAppStorage(UserKeyRecord.class).write(ukr); } //Let anyone who is listening know! Intent i = new Intent("org.commcare.dalvik.api.action.data.update"); this.c.sendBroadcast(i); Logger.log(AndroidLogger.TYPE_USER, "User Sync Successful|" + username); this.publishProgress(PROGRESS_DONE); return DOWNLOAD_SUCCESS; } catch (InvalidStructureException e) { e.printStackTrace(); //TODO: Dump more details!!! Logger.log(AndroidLogger.TYPE_USER, "User Sync failed due to bad payload|" + e.getMessage()); return BAD_DATA; } catch (XmlPullParserException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_USER, "User Sync failed due to bad payload|" + e.getMessage()); return BAD_DATA; } catch (UnfullfilledRequirementsException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_ERROR_ASSERTION, "User sync failed oddly, unfulfilled reqs |" + e.getMessage()); } catch (IllegalStateException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_ERROR_ASSERTION, "User sync failed oddly, ISE |" + e.getMessage()); } catch (StorageFullException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_ERROR_ASSERTION, "Storage Full during user sync |" + e.getMessage()); } } else if(responseCode == 412) { //Our local state is bad. We need to do a full restore. int returnCode = recover(requestor, factory); if(returnCode == PROGRESS_DONE) { //All set! Awesome recovery this.publishProgress(PROGRESS_DONE); return DOWNLOAD_SUCCESS; } else if(returnCode == PROGRESS_RECOVERY_FAIL_SAFE) { //Things didn't go super well, but they might next time! //wipe our login if one happened if(loginNeeded) { CommCareApplication._().releaseUserResourcesAndServices(); } this.publishProgress(PROGRESS_DONE); return UNKNOWN_FAILURE; } else if(returnCode == PROGRESS_RECOVERY_FAIL_BAD) { //WELL! That wasn't so good. TODO: Is there anything //we can do about this? 
//wipe our login if one happened if(loginNeeded) { CommCareApplication._().releaseUserResourcesAndServices(); } this.publishProgress(PROGRESS_DONE); return UNKNOWN_FAILURE; } if(loginNeeded) { CommCareApplication._().releaseUserResourcesAndServices(); } } else if(responseCode == 500) { if(loginNeeded) { CommCareApplication._().releaseUserResourcesAndServices(); } Logger.log(AndroidLogger.TYPE_USER, "500 Server Error|" + username); return SERVER_ERROR; } } catch (SocketTimeoutException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_WARNING_NETWORK, "Timed out listening to receive data during sync"); responseError = CONNECTION_TIMEOUT; } catch (ConnectTimeoutException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_WARNING_NETWORK, "Timed out listening to receive data during sync"); responseError = CONNECTION_TIMEOUT; } catch (ClientProtocolException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_WARNING_NETWORK, "Couldn't sync due to network error|" + e.getMessage()); } catch (UnknownHostException e) { Logger.log(AndroidLogger.TYPE_WARNING_NETWORK, "Couldn't sync due to bad network"); responseError = UNREACHABLE_HOST; } catch (IOException e) { e.printStackTrace(); Logger.log(AndroidLogger.TYPE_WARNING_NETWORK, "Couldn't sync due to IO Error|" + e.getMessage()); } catch (SessionUnavailableException sue) { // TODO PLM: eventually take out this catch. These should be // checked locally //TODO: Keys were lost somehow. sue.printStackTrace(); } if(loginNeeded) { CommCareApplication._().releaseUserResourcesAndServices(); } this.publishProgress(PROGRESS_DONE); return responseError; } finally { CommCareSessionService.sessionAliveLock.unlock(); } } /** * Retrieves the HttpResponse stream and writes it to an initialized safe * local cache. Notifies listeners of progress through the download if its * size is available. * * @throws IOException If there is an issue reading or writing the response. */ private BitCache writeResponseToCache(HttpResponse response) throws IOException { BitCache cache = null; try { final long dataSizeGuess = guessDataSize(response); cache = BitCacheFactory.getCache(c, dataSizeGuess); cache.initializeCache(); OutputStream cacheOut = cache.getCacheStream(); InputStream input; if(DEBUG_LOAD_FROM_LOCAL) { input = this.mDebugStream; } else { input = AndroidHttpClient.getUngzippedContent(response.getEntity()); } Log.i("commcare-network", "Starting network read, expected content size: " + dataSizeGuess + "b"); AndroidStreamUtil.writeFromInputToOutput(new BufferedInputStream(input), cacheOut, new StreamReadObserver() { long lastOutput = 0; /** The notification threshold. **/ static final int PERCENT_INCREASE_THRESHOLD = 4; @Override public void notifyCurrentCount(long bytesRead) { boolean notify = false; //We always wanna notify when we get our first bytes if(lastOutput == 0) { Log.i("commcare-network", "First " + bytesRead + " bytes received from network"); notify = true; } //After, if we don't know how much data to expect, we can't do //anything useful if(dataSizeGuess == -1) { //set this so the first notification up there doesn't keep firing lastOutput = bytesRead; return; } int percentIncrease = (int)(((bytesRead - lastOutput) * 100) / dataSizeGuess); //Now see if we're over the reporting threshold //TODO: Is this actually necessary? In theory this shouldn't //matter due to android task polling magic? 
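// Worked example of the reporting-threshold arithmetic above (the numbers
// are illustrative, not from the original source): with dataSizeGuess =
// 1,000,000 bytes and lastOutput = 0, a read reaching 50,000 bytes gives
// percentIncrease = (50,000 * 100) / 1,000,000 = 5, which exceeds
// PERCENT_INCREASE_THRESHOLD (4), so a PROGRESS_DOWNLOADING update is
// published and lastOutput advances to 50,000.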
notify = percentIncrease > PERCENT_INCREASE_THRESHOLD; if(notify) { lastOutput = bytesRead; int totalRead = (int)(((bytesRead) * 100) / dataSizeGuess); publishProgress(PROGRESS_DOWNLOADING, totalRead); } } }); return cache; //If something goes wrong while we're reading into the cache //we may need to free the storage we reserved. } catch (IOException e) { if(cache != null) { cache.release(); } throw e; } } /** * Get an estimation of how large the provided response is. * @return -1 for unknown. */ private long guessDataSize(HttpResponse response) { if(DEBUG_LOAD_FROM_LOCAL) { try { //Note: this is really stupid, but apparently you can't //retrieve the size of Assets due to some bullshit, so //this is the closest you get. return this.mDebugStream.available(); } catch (IOException e) { return -1; } } if(response.containsHeader("Content-Length")) { String length = response.getFirstHeader("Content-Length").getValue(); try{ return Long.parseLong(length); } catch(Exception e) { //Whatever. } } return -1; } //TODO: This and the normal sync share a ton of code. It's hard to really... figure out the right way to private int recover(HttpRequestGenerator requestor, AndroidTransactionParserFactory factory) { this.publishProgress(PROGRESS_RECOVERY_NEEDED); Logger.log(AndroidLogger.TYPE_USER, "Sync Recovery Triggered"); BitCache cache = null; //This chunk is the safe field of operations which can all fail in IO in such a way that we can //just report back that things didn't work and don't need to attempt any recovery or additional //work try { //Make a new request without all of the flags HttpResponse response = requestor.makeCaseFetchRequest(server, false); int responseCode = response.getStatusLine().getStatusCode(); //We basically only care about a positive response, here. Anything else would have been caught by the other request. if(!(responseCode >= 200 && responseCode < 300)) { return PROGRESS_RECOVERY_FAIL_SAFE; } //Grab a cache. The plan is to download the incoming data, wipe (move) the existing db, and then //restore fresh from the downloaded file cache = writeResponseToCache(response); } catch(IOException e) { e.printStackTrace(); //Ok, well, we're bailing here, but we didn't make any changes Logger.log(AndroidLogger.TYPE_USER, "Sync Recovery Failed due to IOException|" + e.getMessage()); return PROGRESS_RECOVERY_FAIL_SAFE; } this.publishProgress(PROGRESS_RECOVERY_STARTED); Logger.log(AndroidLogger.TYPE_USER, "Sync Recovery payload downloaded"); //Ok. Here's where things get real. We now have a stable copy of the fresh data from the //server, so it's "safe" for us to wipe the casedb copy of it. //CTS: We're not doing this in a super good way right now, need to be way more fault tolerant. //this is the temporary implementation of everything past this point //Wipe storage //TODO: move table instead. 
Should be straightforward with sandboxed db's CommCareApplication._().getUserStorage(ACase.STORAGE_KEY, ACase.class).removeAll(); String failureReason = ""; try { //Get new data String syncToken = readInput(cache.retrieveCache(), factory); updateUserSyncToken(syncToken); Logger.log(AndroidLogger.TYPE_USER, "Sync Recovery Successful"); return PROGRESS_DONE; } catch (InvalidStructureException e) { e.printStackTrace(); failureReason = e.getMessage(); } catch (XmlPullParserException e) { e.printStackTrace(); failureReason = e.getMessage(); } catch (UnfullfilledRequirementsException e) { e.printStackTrace(); failureReason = e.getMessage(); } catch (StorageFullException e) { e.printStackTrace(); failureReason = e.getMessage(); } //These last two aren't a sign that the incoming data is bad, but //we still can't recover from them usefully catch (SessionUnavailableException e) { e.printStackTrace(); failureReason = e.getMessage(); } catch (IOException e) { e.printStackTrace(); failureReason = e.getMessage(); } finally { //destroy temp file cache.release(); } //OK, so we would have returned success by now if things had worked out, which means that instead we got an error //while trying to parse everything out. We need to recover from that error here and rollback the changes //TODO: Roll back changes Logger.log(AndroidLogger.TYPE_USER, "Sync recovery failed|" + failureReason); return PROGRESS_RECOVERY_FAIL_BAD; } //Utility method for debugging if people need to dump the response body private void dumpCache(BitCache cache) { try{ ByteArrayOutputStream baos = new ByteArrayOutputStream(); AndroidStreamUtil.writeFromInputToOutput(cache.retrieveCache(), baos); Log.d(TAG, new String(baos.toByteArray())); } catch(IOException e) { e.printStackTrace(); } } private void updateUserSyncToken(String syncToken) throws StorageFullException { SqlStorage<User> storage = CommCareApplication._().getUserStorage("USER", User.class); try { User u = storage.getRecordForValue(User.META_USERNAME, username); u.setLastSyncToken(syncToken); storage.write(u); } catch(NoSuchElementException nsee) { //TODO: Something here? Maybe figure out if we downloaded a user from the server and attach the data to it? } } private void purgeCases() { long start = System.currentTimeMillis(); //We need to determine if we're using ownership for purging. For right now, only in sync mode Vector<String> owners = new Vector<String>(); Vector<String> users = new Vector<String>(); for(IStorageIterator<User> userIterator = CommCareApplication._().getUserStorage(User.class).iterate(); userIterator.hasMore();) { String id = userIterator.nextRecord().getUniqueId(); owners.addElement(id); users.addElement(id); } //Now add all of the relevant groups //TODO: Wow. This is.... 
kind of megasketch for(String userId : users) { DataInstance instance = CommCareUtil.loadFixture("user-groups", userId); if(instance == null) { continue; } EvaluationContext ec = new EvaluationContext(instance); for(TreeReference ref : ec.expandReference(XPathReference.getPathExpr("/groups/group/@id").getReference())) { AbstractTreeElement<AbstractTreeElement> idelement = ec.resolveReference(ref); if(idelement.getValue() != null) { owners.addElement(idelement.getValue().uncast().getString()); } } } SqlStorage<ACase> storage = CommCareApplication._().getUserStorage(ACase.STORAGE_KEY, ACase.class); CasePurgeFilter filter = new CasePurgeFilter(storage, owners); int removedCases = storage.removeAll(filter).size(); SqlStorage<Ledger> stockStorage = CommCareApplication._().getUserStorage(Ledger.STORAGE_KEY, Ledger.class); LedgerPurgeFilter stockFilter = new LedgerPurgeFilter(stockStorage, storage); int removedLedgers = stockStorage.removeAll(stockFilter).size(); long taken = System.currentTimeMillis() - start; Logger.log(AndroidLogger.TYPE_MAINTENANCE, String.format("Purged [%d Case, %d Ledger] records in %dms", removedCases, removedLedgers, taken)); } private String readInput(InputStream stream, AndroidTransactionParserFactory factory) throws InvalidStructureException, IOException, XmlPullParserException, UnfullfilledRequirementsException, SessionUnavailableException{ DataModelPullParser parser; factory.initCaseParser(); factory.initStockParser(); Hashtable<String,String> formNamespaces = new Hashtable<String, String>(); for(String xmlns : CommCareApplication._().getCommCarePlatform().getInstalledForms()) { Cursor cur = c.getContentResolver().query(CommCareApplication._().getCommCarePlatform().getFormContentUri(xmlns), new String[] {FormsColumns.FORM_FILE_PATH}, null, null, null); if(cur.moveToFirst()) { String path = cur.getString(cur.getColumnIndex(FormsColumns.FORM_FILE_PATH)); formNamespaces.put(xmlns, path); } else { throw new RuntimeException("No form registered for xmlns at content URI: " + CommCareApplication._().getCommCarePlatform().getFormContentUri(xmlns)); } cur.close(); } factory.initFormInstanceParser(formNamespaces); // SqlIndexedStorageUtility<FormRecord> formRecordStorge = CommCareApplication._().getStorage(FormRecord.STORAGE_KEY, FormRecord.class); // // for(SqlStorageIterator<FormRecord> i = formRecordStorge.iterate(); i.hasNext() ;) { // // } //this is _really_ coupled, but we'll tolerate it for now because of the absurd performance gains SQLiteDatabase db = CommCareApplication._().getUserDbHandle(); try { db.beginTransaction(); parser = new DataModelPullParser(stream, factory, this); parser.parse(); db.setTransactionSuccessful(); } finally { db.endTransaction(); } //Return the sync token ID return factory.getSyncToken(); } //BEGIN - OTA Listener methods below - Note that most of the methods //below weren't really implemented @Override public void onUpdate(int numberCompleted) { mCurrentProgress = numberCompleted; int miliSecElapsed = (int)(System.currentTimeMillis() - mSyncStartTime); this.publishProgress(PROGRESS_PROCESSING, mCurrentProgress, mTotalItems, miliSecElapsed); } @Override public void setTotalForms(int totalItemCount) { mTotalItems = totalItemCount; mCurrentProgress = 0; mSyncStartTime = System.currentTimeMillis(); this.publishProgress(PROGRESS_PROCESSING, mCurrentProgress, mTotalItems, 0); } @Override public void statusUpdate(int statusNumber) {} @Override public void refreshView() {} @Override public void getCredentials() {} @Override public void 
promptRetry(String msg) {} @Override public void onSuccess() {} @Override public void onFailure(String failMessage) {} }
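A short illustrative sketch (not part of the original file) of how a concrete subclass might consume DataPullTask's result and PROGRESS_* codes. It assumes CommCareTask exposes the usual deliverResult/deliverUpdate/deliverError template methods; the class name and log tags below are hypothetical.

// Hypothetical subclass for illustration only; not from the original repository.
public class LoggingDataPullTask extends DataPullTask<Void> {
    public LoggingDataPullTask(String username, String password, String server, String keyProvider, Context c) {
        super(username, password, server, keyProvider, c);
    }

    @Override
    protected void deliverResult(Void receiver, Integer result) {
        // result is one of DOWNLOAD_SUCCESS, AUTH_FAILED, BAD_DATA, UNKNOWN_FAILURE,
        // UNREACHABLE_HOST, CONNECTION_TIMEOUT, or SERVER_ERROR.
        Log.i("sync-example", "Sync finished with code " + result);
    }

    @Override
    protected void deliverUpdate(Void receiver, Integer... update) {
        // update[0] is a PROGRESS_* code; for PROGRESS_DOWNLOADING, update[1]
        // carries the percentage read so far.
        Log.i("sync-example", "Progress code " + update[0]);
    }

    @Override
    protected void deliverError(Void receiver, Exception e) {
        Log.e("sync-example", "Sync failed", e);
    }
}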
app/src/org/commcare/android/tasks/DataPullTask.java
fix user storage
app/src/org/commcare/android/tasks/DataPullTask.java
fix user storage
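For reference, the change this record carries ("fix user storage") is a one-line fix in updateUserSyncToken, where the user storage lookup now passes the storage key explicitly:

// Old contents:
SqlStorage<User> storage = CommCareApplication._().getUserStorage(User.class);
// New contents:
SqlStorage<User> storage = CommCareApplication._().getUserStorage("USER", User.class);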
Java
apache-2.0
bdab34c32f9ebba3a9f8efe40adab5a47d75af99
0
sankarh/hive,sankarh/hive,sankarh/hive,sankarh/hive,sankarh/hive,sankarh/hive,sankarh/hive,sankarh/hive,sankarh/hive
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore; import com.codahale.metrics.Counter; import com.facebook.fb303.FacebookBase; import com.facebook.fb303.fb_status; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.Lists; import com.google.common.util.concurrent.Striped; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.commons.collections.CollectionUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.AcidConstants; import org.apache.hadoop.hive.common.AcidMetaDataFile; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidReaderWriteIdList; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.common.repl.ReplConst; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.api.Package; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; import org.apache.hadoop.hive.metastore.dataconnector.DataConnectorProviderFactory; import org.apache.hadoop.hive.metastore.events.*; import org.apache.hadoop.hive.metastore.messaging.EventMessage; import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType; import org.apache.hadoop.hive.metastore.metrics.Metrics; import org.apache.hadoop.hive.metastore.metrics.MetricsConstants; import org.apache.hadoop.hive.metastore.metrics.PerfLogger; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.txn.CompactionInfo; import org.apache.hadoop.hive.metastore.txn.TxnStore; import org.apache.hadoop.hive.metastore.txn.TxnUtils; import org.apache.hadoop.hive.metastore.utils.FileUtils; import org.apache.hadoop.hive.metastore.utils.FilterUtils; import org.apache.hadoop.hive.metastore.utils.HdfsUtils; import org.apache.hadoop.hive.metastore.utils.JavaUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.metastore.utils.MetastoreVersionInfo; import org.apache.hadoop.hive.metastore.utils.SecurityUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jdo.JDOException; 
import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.nio.ByteBuffer; import java.security.PrivilegedExceptionAction; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.PriorityQueue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.regex.Pattern; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.join; import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_CTAS; import static org.apache.hadoop.hive.metastore.ExceptionHandler.handleException; import static org.apache.hadoop.hive.metastore.ExceptionHandler.newMetaException; import static org.apache.hadoop.hive.metastore.ExceptionHandler.rethrowException; import static org.apache.hadoop.hive.metastore.ExceptionHandler.throwMetaException; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_COMMENT; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; import static org.apache.hadoop.hive.metastore.Warehouse.getCatalogQualifiedTableName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.CAT_NAME; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.DB_NAME; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.parseDbName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.prependCatalogToDbName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.prependNotNullCatToDbName; /** * Default handler for all Hive Metastore methods. Implements methods defined in hive_metastore.thrift. */ public class HMSHandler extends FacebookBase implements IHMSHandler { public static final Logger LOG = LoggerFactory.getLogger(HMSHandler.class); private final Configuration conf; // stores datastore (jpox) properties, // right now they come from jpox.properties // Flag to control that always threads are initialized only once // instead of multiple times private final static AtomicBoolean alwaysThreadsInitialized = new AtomicBoolean(false); private static String currentUrl; private FileMetadataManager fileMetadataManager; private PartitionExpressionProxy expressionProxy; private StorageSchemaReader storageSchemaReader; private IMetaStoreMetadataTransformer transformer; private static DataConnectorProviderFactory dataconnectorFactory = null; // Variables for metrics // Package visible so that HMSMetricsListener can see them. 
static AtomicInteger databaseCount, tableCount, partCount; public static final String PARTITION_NUMBER_EXCEED_LIMIT_MSG = "Number of partitions scanned (=%d) on table '%s' exceeds limit (=%d). This is controlled on the metastore server by %s."; // Used for testing to simulate method timeout. @VisibleForTesting static boolean testTimeoutEnabled = false; @VisibleForTesting static long testTimeoutValue = -1; public static final String TRUNCATE_SKIP_DATA_DELETION = "truncateSkipDataDeletion"; public static final String ADMIN = "admin"; public static final String PUBLIC = "public"; static final String NO_FILTER_STRING = ""; static final int UNLIMITED_MAX_PARTITIONS = -1; private Warehouse wh; // hdfs warehouse private static Striped<Lock> tablelocks; private static final ThreadLocal<RawStore> threadLocalMS = new ThreadLocal<RawStore>(); private static final ThreadLocal<TxnStore> threadLocalTxn = new ThreadLocal<TxnStore>(); private static final ThreadLocal<Map<String, com.codahale.metrics.Timer.Context>> timerContexts = new ThreadLocal<Map<String, com.codahale.metrics.Timer.Context>>() { @Override protected Map<String, com.codahale.metrics.Timer.Context> initialValue() { return new HashMap<>(); } }; public static RawStore getRawStore() { return threadLocalMS.get(); } static void cleanupRawStore() { try { RawStore rs = getRawStore(); if (rs != null) { logAndAudit("Cleaning up thread local RawStore..."); rs.shutdown(); } } finally { HMSHandler handler = threadLocalHMSHandler.get(); if (handler != null) { handler.notifyMetaListenersOnShutDown(); } threadLocalHMSHandler.remove(); threadLocalConf.remove(); threadLocalModifiedConfig.remove(); removeRawStore(); logAndAudit("Done cleaning up thread local RawStore"); } } static void removeRawStore() { threadLocalMS.remove(); } // Thread local configuration is needed as many threads could make changes // to the conf using the connection hook private static final ThreadLocal<Configuration> threadLocalConf = new ThreadLocal<Configuration>(); /** * Thread local HMSHandler used during shutdown to notify meta listeners */ private static final ThreadLocal<HMSHandler> threadLocalHMSHandler = new ThreadLocal<>(); /** * Thread local Map to keep track of modified meta conf keys */ private static final ThreadLocal<Map<String, String>> threadLocalModifiedConfig = new ThreadLocal<Map<String, String>>() { @Override protected Map<String, String> initialValue() { return new HashMap<>(); } }; private static ExecutorService threadPool; static final Logger auditLog = LoggerFactory.getLogger( HiveMetaStore.class.getName() + ".audit"); private static void logAuditEvent(String cmd) { if (cmd == null) { return; } UserGroupInformation ugi; try { ugi = SecurityUtils.getUGI(); } catch (Exception ex) { throw new RuntimeException(ex); } String address = getIPAddress(); if (address == null) { address = "unknown-ip-addr"; } auditLog.info("ugi={} ip={} cmd={} ", ugi.getUserName(), address, cmd); } public static String getIPAddress() { if (HiveMetaStore.useSasl) { if (HiveMetaStore.saslServer != null && HiveMetaStore.saslServer.getRemoteAddress() != null) { return HiveMetaStore.saslServer.getRemoteAddress().getHostAddress(); } } else { // if kerberos is not enabled return getThreadLocalIpAddress(); } return null; } private static AtomicInteger nextSerialNum = new AtomicInteger(); private static ThreadLocal<Integer> threadLocalId = new ThreadLocal<Integer>() { @Override protected Integer initialValue() { return nextSerialNum.getAndIncrement(); } }; // This will only be set if the 
metastore is being accessed from a metastore Thrift server, // not if it is from the CLI. Also, only if the TTransport being used to connect is an // instance of TSocket. This is also not set when kerberos is used. private static ThreadLocal<String> threadLocalIpAddress = new ThreadLocal<String>(); /** * Internal function to notify listeners for meta config change events */ private void notifyMetaListeners(String key, String oldValue, String newValue) throws MetaException { for (MetaStoreEventListener listener : listeners) { listener.onConfigChange(new ConfigChangeEvent(this, key, oldValue, newValue)); } if (transactionalListeners.size() > 0) { // All the fields of this event are final, so no reason to create a new one for each // listener ConfigChangeEvent cce = new ConfigChangeEvent(this, key, oldValue, newValue); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onConfigChange(cce); } } } /** * Internal function to notify listeners to revert back to old values of keys * that were modified during setMetaConf. This would get called from HiveMetaStore#cleanupRawStore */ private void notifyMetaListenersOnShutDown() { Map<String, String> modifiedConf = threadLocalModifiedConfig.get(); if (modifiedConf == null) { // Nothing got modified return; } try { Configuration conf = threadLocalConf.get(); if (conf == null) { throw new MetaException("Unexpected: modifiedConf is non-null but conf is null"); } // Notify listeners of the changed value for (Map.Entry<String, String> entry : modifiedConf.entrySet()) { String key = entry.getKey(); // curr value becomes old and vice-versa String currVal = entry.getValue(); String oldVal = conf.get(key); if (!Objects.equals(oldVal, currVal)) { notifyMetaListeners(key, oldVal, currVal); } } logAndAudit("Meta listeners shutdown notification completed."); } catch (MetaException e) { LOG.error("Failed to notify meta listeners on shutdown: ", e); } } static void setThreadLocalIpAddress(String ipAddress) { threadLocalIpAddress.set(ipAddress); } // This will return null if the metastore is not being accessed from a metastore Thrift server, // or if the TTransport being used to connect is not an instance of TSocket, or if kerberos // is used static String getThreadLocalIpAddress() { return threadLocalIpAddress.get(); } // Make it possible for tests to check that the right type of PartitionExpressionProxy was // instantiated. @VisibleForTesting PartitionExpressionProxy getExpressionProxy() { return expressionProxy; } /** * Use {@link #getThreadId()} instead. 
* @return thread id */ @Deprecated public static Integer get() { return threadLocalId.get(); } @Override public int getThreadId() { return threadLocalId.get(); } public HMSHandler(String name) throws MetaException { this(name, MetastoreConf.newMetastoreConf(), true); } public HMSHandler(String name, Configuration conf) throws MetaException { this(name, conf, true); } public HMSHandler(String name, Configuration conf, boolean init) throws MetaException { super(name); this.conf = conf; isInTest = MetastoreConf.getBoolVar(this.conf, ConfVars.HIVE_IN_TEST); if (threadPool == null) { synchronized (HMSHandler.class) { if (threadPool == null) { int numThreads = MetastoreConf.getIntVar(conf, ConfVars.FS_HANDLER_THREADS_COUNT); threadPool = Executors.newFixedThreadPool(numThreads, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("HMSHandler #%d").build()); int numTableLocks = MetastoreConf.getIntVar(conf, ConfVars.METASTORE_NUM_STRIPED_TABLE_LOCKS); tablelocks = Striped.lock(numTableLocks); } } } if (init) { init(); } } /** * Use {@link #getConf()} instead. * @return Configuration object */ @Deprecated public Configuration getHiveConf() { return conf; } private AlterHandler alterHandler; private List<MetaStorePreEventListener> preListeners; private List<MetaStoreEventListener> listeners; private List<TransactionalMetaStoreEventListener> transactionalListeners; private List<MetaStoreEndFunctionListener> endFunctionListeners; private List<MetaStoreInitListener> initListeners; private MetaStoreFilterHook filterHook; private boolean isServerFilterEnabled = false; private Pattern partitionValidationPattern; private final boolean isInTest; @Override public List<TransactionalMetaStoreEventListener> getTransactionalListeners() { return transactionalListeners; } @Override public List<MetaStoreEventListener> getListeners() { return listeners; } @Override public void init() throws MetaException { initListeners = MetaStoreServerUtils.getMetaStoreListeners( MetaStoreInitListener.class, conf, MetastoreConf.getVar(conf, ConfVars.INIT_HOOKS)); for (MetaStoreInitListener singleInitListener: initListeners) { MetaStoreInitContext context = new MetaStoreInitContext(); singleInitListener.onInit(context); } String alterHandlerName = MetastoreConf.getVar(conf, ConfVars.ALTER_HANDLER); alterHandler = ReflectionUtils.newInstance(JavaUtils.getClass( alterHandlerName, AlterHandler.class), conf); wh = new Warehouse(conf); synchronized (HMSHandler.class) { if (currentUrl == null || !currentUrl.equals(MetaStoreInit.getConnectionURL(conf))) { createDefaultDB(); createDefaultRoles(); addAdminUsers(); currentUrl = MetaStoreInit.getConnectionURL(conf); } } //Start Metrics if (MetastoreConf.getBoolVar(conf, ConfVars.METRICS_ENABLED)) { LOG.info("Begin calculating metadata count metrics."); Metrics.initialize(conf); databaseCount = Metrics.getOrCreateGauge(MetricsConstants.TOTAL_DATABASES); tableCount = Metrics.getOrCreateGauge(MetricsConstants.TOTAL_TABLES); partCount = Metrics.getOrCreateGauge(MetricsConstants.TOTAL_PARTITIONS); updateMetrics(); } preListeners = MetaStoreServerUtils.getMetaStoreListeners(MetaStorePreEventListener.class, conf, MetastoreConf.getVar(conf, ConfVars.PRE_EVENT_LISTENERS)); preListeners.add(0, new TransactionalValidationListener(conf)); listeners = MetaStoreServerUtils.getMetaStoreListeners(MetaStoreEventListener.class, conf, MetastoreConf.getVar(conf, ConfVars.EVENT_LISTENERS)); listeners.add(new SessionPropertiesListener(conf)); transactionalListeners = new ArrayList() {{ add(new 
AcidEventListener(conf)); }}; transactionalListeners.addAll(MetaStoreServerUtils.getMetaStoreListeners( TransactionalMetaStoreEventListener.class, conf, MetastoreConf.getVar(conf, ConfVars.TRANSACTIONAL_EVENT_LISTENERS))); if (Metrics.getRegistry() != null) { listeners.add(new HMSMetricsListener(conf)); } boolean cachedStoreCanUseEvent = false; for (MetaStoreEventListener listener : transactionalListeners) { if (listener.doesAddEventsToNotificationLogTable()) { cachedStoreCanUseEvent = true; break; } } if (conf.getBoolean(ConfVars.METASTORE_CACHE_CAN_USE_EVENT.getVarname(), false) && !cachedStoreCanUseEvent) { throw new MetaException("CachedStore cannot use events for invalidation as there is no " + "TransactionalMetaStoreEventListener to add events to the notification table"); } endFunctionListeners = MetaStoreServerUtils.getMetaStoreListeners( MetaStoreEndFunctionListener.class, conf, MetastoreConf.getVar(conf, ConfVars.END_FUNCTION_LISTENERS)); String partitionValidationRegex = MetastoreConf.getVar(conf, ConfVars.PARTITION_NAME_WHITELIST_PATTERN); if (partitionValidationRegex != null && !partitionValidationRegex.isEmpty()) { partitionValidationPattern = Pattern.compile(partitionValidationRegex); } // We initialize the tasks that need to run periodically only once. For a remote metastore // these threads are started along with the other housekeeping threads only in the leader // HMS. String leaderHost = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_HOUSEKEEPING_LEADER_HOSTNAME); if (!HiveMetaStore.isMetaStoreRemote() && ((leaderHost == null) || leaderHost.trim().isEmpty())) { startAlwaysTaskThreads(conf); } else if (!HiveMetaStore.isMetaStoreRemote()) { LOG.info("Not starting tasks specified by " + ConfVars.TASK_THREADS_ALWAYS.getVarname() + " since " + leaderHost + " is configured to run these tasks."); } expressionProxy = PartFilterExprUtil.createExpressionProxy(conf); fileMetadataManager = new FileMetadataManager(this.getMS(), conf); isServerFilterEnabled = getIfServerFilterenabled();
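// A minimal sketch of what enabling server-side filtering means downstream (see get_databases
// further below): listing results are passed through the hook, e.g.
//   ret = FilterUtils.filterDbNamesIfEnabled(isServerFilterEnabled, filterHook, ret);
// so objects the caller may not see are removed before the response is returned.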
filterHook = isServerFilterEnabled ? loadFilterHooks() : null; String className = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS); if (className != null && !className.trim().isEmpty()) { try { transformer = JavaUtils.newInstance(JavaUtils.getClass(className.trim(), IMetaStoreMetadataTransformer.class), new Class[] {IHMSHandler.class}, new Object[] {this}); } catch (Exception e) { LOG.error("Unable to create instance of class " + className, e); throw new IllegalArgumentException(e); } } dataconnectorFactory = DataConnectorProviderFactory.getInstance(this); } static void startAlwaysTaskThreads(Configuration conf) throws MetaException { if (alwaysThreadsInitialized.compareAndSet(false, true)) { ThreadPool.initialize(conf); Collection<String> taskNames = MetastoreConf.getStringCollection(conf, ConfVars.TASK_THREADS_ALWAYS); for (String taskName : taskNames) { MetastoreTaskThread task = JavaUtils.newInstance(JavaUtils.getClass(taskName, MetastoreTaskThread.class)); task.setConf(conf); long freq = task.runFrequency(TimeUnit.MILLISECONDS); LOG.info("Scheduling for " + task.getClass().getCanonicalName() + " service with " + "frequency " + freq + "ms."); // For backwards compatibility, since some threads used to be hard coded but only run if // frequency was > 0 if (freq > 0) { ThreadPool.getPool().scheduleAtFixedRate(task, freq, freq, TimeUnit.MILLISECONDS); } } } } /** * Server-side filtering is enabled only when a filter hook is configured, the hook is not the * default no-op implementation, and filtering is enabled in the configuration. * @return true if server-side filtering should be applied */ private boolean getIfServerFilterenabled() throws MetaException { boolean isEnabled = MetastoreConf.getBoolVar(conf, ConfVars.METASTORE_SERVER_FILTER_ENABLED); if (!isEnabled) { LOG.info("HMS server filtering is disabled by configuration"); return false; } String filterHookClassName = MetastoreConf.getVar(conf, ConfVars.FILTER_HOOK); if (isBlank(filterHookClassName)) { throw new MetaException("HMS server filtering is enabled but no filter hook is configured"); } if (filterHookClassName.trim().equalsIgnoreCase(DefaultMetaStoreFilterHookImpl.class.getName())) { throw new MetaException("HMS server filtering is enabled but the filter hook is DefaultMetaStoreFilterHookImpl, which does no filtering"); } LOG.info("HMS server filtering is enabled. The filter class is " + filterHookClassName); return true; } private MetaStoreFilterHook loadFilterHooks() throws IllegalStateException { String errorMsg = "Unable to load filter hook at HMS server. "; String filterHookClassName = MetastoreConf.getVar(conf, ConfVars.FILTER_HOOK); Preconditions.checkState(!isBlank(filterHookClassName)); try { return (MetaStoreFilterHook) Class.forName( filterHookClassName.trim(), true, JavaUtils.getClassLoader()).getConstructor( Configuration.class).newInstance(conf); } catch (Exception e) { LOG.error(errorMsg, e); throw new IllegalStateException(errorMsg + e.getMessage(), e); } } /** * Check if the user can access the table associated with the partition. If not, throw an * exception so the user cannot access partitions associated with this table. * We are not calling the pre-event listener for authorization because it requires getting the * table object from the DB, which is more overhead. Instead, we call the filter hook to filter * out the table if the user has no access. The filter hook only requires the table name, not * the table object.
That saves a DB access for the * table object, and still achieves the same purpose: checking whether the user can access the * specified table * * @param catName catalog name of the table * @param dbName database name of the table * @param tblName table name * @throws NoSuchObjectException * @throws MetaException */ private void authorizeTableForPartitionMetadata( final String catName, final String dbName, final String tblName) throws NoSuchObjectException, MetaException { FilterUtils.checkDbAndTableFilters( isServerFilterEnabled, filterHook, catName, dbName, tblName); } private static String addPrefix(String s) { return threadLocalId.get() + ": " + s; } /** * Set a copy of the invoking HMSHandler on the thread local */ private static void setHMSHandler(HMSHandler handler) { if (threadLocalHMSHandler.get() == null) { threadLocalHMSHandler.set(handler); } } @Override public void setConf(Configuration conf) { threadLocalConf.set(conf); RawStore ms = threadLocalMS.get(); if (ms != null) { ms.setConf(conf); // reload if DS related configuration is changed } } @Override public Configuration getConf() { Configuration conf = threadLocalConf.get(); if (conf == null) { conf = new Configuration(this.conf); threadLocalConf.set(conf); } return conf; } @Override public Warehouse getWh() { return wh; } @Override public void setMetaConf(String key, String value) throws MetaException { ConfVars confVar = MetastoreConf.getMetaConf(key); if (confVar == null) { throw new MetaException("Invalid configuration key " + key); } try { confVar.validate(value); } catch (IllegalArgumentException e) { throw new MetaException("Invalid configuration value " + value + " for key " + key + " due to " + e.getMessage()); } Configuration configuration = getConf(); String oldValue = MetastoreConf.get(configuration, key); // Save the previous value of the key on the thread-local map Map<String, String> modifiedConf = threadLocalModifiedConfig.get(); if (!modifiedConf.containsKey(key)) { modifiedConf.put(key, oldValue); } // Set the invoking HMSHandler on the thread local; this will be used later to notify // metaListeners in HiveMetaStore#cleanupRawStore setHMSHandler(this); configuration.set(key, value); notifyMetaListeners(key, oldValue, value); if (ConfVars.TRY_DIRECT_SQL == confVar) { HMSHandler.LOG.info("Direct SQL optimization = {}", value); } } @Override public String getMetaConf(String key) throws MetaException { ConfVars confVar = MetastoreConf.getMetaConf(key); if (confVar == null) { throw new MetaException("Invalid configuration key " + key); } return getConf().get(key, confVar.getDefaultVal().toString()); } /** * Get a cached RawStore.
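* <p>The store is created lazily per thread: the first call on a given Thrift worker thread
* instantiates a RawStore, verifies the schema, and caches the instance in a thread local, so
* subsequent calls on that thread reuse it.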
* * @return the cached RawStore * @throws MetaException */ @Override public RawStore getMS() throws MetaException { Configuration conf = getConf(); return getMSForConf(conf); } public static RawStore getMSForConf(Configuration conf) throws MetaException { RawStore ms = threadLocalMS.get(); if (ms == null) { ms = newRawStoreForConf(conf); try { ms.verifySchema(); } catch (MetaException e) { ms.shutdown(); throw e; } threadLocalMS.set(ms); ms = threadLocalMS.get(); LOG.info("Created RawStore: " + ms + " from thread id: " + Thread.currentThread().getId()); } return ms; } @Override public TxnStore getTxnHandler() { return getMsThreadTxnHandler(conf); } public static TxnStore getMsThreadTxnHandler(Configuration conf) { TxnStore txn = threadLocalTxn.get(); if (txn == null) { txn = TxnUtils.getTxnStore(conf); threadLocalTxn.set(txn); } return txn; } static RawStore newRawStoreForConf(Configuration conf) throws MetaException { Configuration newConf = new Configuration(conf); String rawStoreClassName = MetastoreConf.getVar(newConf, ConfVars.RAW_STORE_IMPL); LOG.info(addPrefix("Opening raw store with implementation class: " + rawStoreClassName)); return RawStoreProxy.getProxy(newConf, conf, rawStoreClassName, threadLocalId.get()); } @VisibleForTesting public static void createDefaultCatalog(RawStore ms, Warehouse wh) throws MetaException, InvalidOperationException { try { Catalog defaultCat = ms.getCatalog(DEFAULT_CATALOG_NAME); // Null check because in some test cases we get a null from ms.getCatalog. if (defaultCat != null && defaultCat.getLocationUri().equals("TBD")) { // One-time update issue. When the new 'hive' catalog is created in an upgrade, the // script does not know the location of the warehouse. So we need to update it. LOG.info("Setting location of default catalog, as it hasn't been done after upgrade"); defaultCat.setLocationUri(wh.getWhRoot().toString()); ms.alterCatalog(defaultCat.getName(), defaultCat); } } catch (NoSuchObjectException e) { Catalog cat = new Catalog(DEFAULT_CATALOG_NAME, wh.getWhRoot().toString()); long time = System.currentTimeMillis() / 1000; cat.setCreateTime((int) time); cat.setDescription(Warehouse.DEFAULT_CATALOG_COMMENT); ms.createCatalog(cat); } } private void createDefaultDB_core(RawStore ms) throws MetaException, InvalidObjectException { try { ms.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME); } catch (NoSuchObjectException e) { LOG.info("Started creating a default database with name: " + DEFAULT_DATABASE_NAME); Database db = new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT, wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME, true).toString(), null); db.setOwnerName(PUBLIC); db.setOwnerType(PrincipalType.ROLE); db.setCatalogName(DEFAULT_CATALOG_NAME); long time = System.currentTimeMillis() / 1000; db.setCreateTime((int) time); db.setType(DatabaseType.NATIVE); ms.createDatabase(db); LOG.info("Successfully created a default database with name: " + DEFAULT_DATABASE_NAME); } } /** * Create the default database if it doesn't exist. * * There is potential contention when HiveServer2 (using an embedded metastore) and a Metastore * Server concurrently invoke createDefaultDB. If the first attempt fails with a JDOException, * it is retried once; if the retry fails as well, the failure is only logged as a warning, * since it means the concurrent caller succeeded.
* * @throws MetaException */ private void createDefaultDB() throws MetaException { try { RawStore ms = getMS(); createDefaultCatalog(ms, wh); createDefaultDB_core(ms); } catch (JDOException e) { LOG.warn("Retrying creating default database after error: " + e.getMessage(), e); try { RawStore ms = getMS(); createDefaultCatalog(ms, wh); createDefaultDB_core(ms); } catch (InvalidObjectException | InvalidOperationException e1) { throw new MetaException(e1.getMessage()); } } catch (InvalidObjectException | InvalidOperationException e) { throw new MetaException(e.getMessage()); } } /** * Create the default roles if they don't exist. * * There is potential contention when HiveServer2 (using an embedded metastore) and a Metastore * Server concurrently invoke createDefaultRoles. If the first attempt fails with a * JDOException, it is retried once; if the retry fails as well, the failure is only logged as * a warning, since it means the concurrent caller succeeded. * * @throws MetaException */ private void createDefaultRoles() throws MetaException { try { createDefaultRoles_core(); } catch (JDOException e) { LOG.warn("Retrying creating default roles after error: " + e.getMessage(), e); createDefaultRoles_core(); } } private void createDefaultRoles_core() throws MetaException { RawStore ms = getMS(); try { ms.addRole(ADMIN, ADMIN); } catch (InvalidObjectException e) { LOG.debug(ADMIN + " role already exists", e); } catch (NoSuchObjectException e) { // This should never be thrown. LOG.warn("Unexpected exception while adding " + ADMIN + " role", e); } LOG.info("Added " + ADMIN + " role in metastore"); try { ms.addRole(PUBLIC, PUBLIC); } catch (InvalidObjectException e) { LOG.debug(PUBLIC + " role already exists", e); } catch (NoSuchObjectException e) { // This should never be thrown. LOG.warn("Unexpected exception while adding " + PUBLIC + " role", e); } LOG.info("Added " + PUBLIC + " role in metastore"); // now grant all privs to admin PrivilegeBag privs = new PrivilegeBag(); privs.addToPrivileges(new HiveObjectPrivilege( new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null), ADMIN, PrincipalType.ROLE, new PrivilegeGrantInfo("All", 0, ADMIN, PrincipalType.ROLE, true), "SQL")); try { ms.grantPrivileges(privs); } catch (InvalidObjectException e) { // Surprisingly these privs are already granted. LOG.debug("Failed while granting global privs to admin", e); } catch (NoSuchObjectException e) { // Unlikely to be thrown. LOG.warn("Failed while granting global privs to admin", e); } } /** * Add admin users if they don't exist. * * There is potential contention when HiveServer2 (using an embedded metastore) and a Metastore * Server concurrently invoke addAdminUsers. If the first attempt fails with a JDOException, it * is retried once; if the retry fails as well, the failure is only logged as a warning, since * it means the concurrent caller succeeded. * * @throws MetaException */ private void addAdminUsers() throws MetaException { try { addAdminUsers_core(); } catch (JDOException e) { LOG.warn("Retrying adding admin users after error: " + e.getMessage(), e); addAdminUsers_core(); } } private void addAdminUsers_core() throws MetaException { // now add pre-configured users to the admin role String userStr = MetastoreConf.getVar(conf, ConfVars.USERS_IN_ADMIN_ROLE, "").trim(); if (userStr.isEmpty()) { LOG.info("No users added to the admin role, since the config is empty"); return; } // Since user names need to be valid unix user names, per IEEE Std 1003.1-2001 they cannot // contain commas, so we can safely split the above string on commas.
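// For illustration only (hypothetical value): a config entry such as
//   metastore.users.in.admin.role=hive,admin1
// would grant the ADMIN role to the users "hive" and "admin1".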
Iterator<String> users = Splitter.on(",").trimResults().omitEmptyStrings().split(userStr).iterator(); if (!users.hasNext()) { LOG.info("No users were added to the admin role, since the config value " + userStr + " is in an incorrect format. A comma-separated list of users is expected."); return; } Role adminRole; RawStore ms = getMS(); try { adminRole = ms.getRole(ADMIN); } catch (NoSuchObjectException e) { LOG.error("Failed to retrieve the just-added admin role", e); return; } while (users.hasNext()) { String userName = users.next(); try { ms.grantRole(adminRole, userName, PrincipalType.USER, ADMIN, PrincipalType.ROLE, true); LOG.info("Added " + userName + " to admin role"); } catch (NoSuchObjectException e) { LOG.error("Failed to add " + userName + " to the admin role", e); } catch (InvalidObjectException e) { LOG.debug(userName + " already in admin role", e); } } } private static void logAndAudit(final String m) { LOG.debug("{}: {}", threadLocalId.get(), m); logAuditEvent(m); } private String startFunction(String function, String extraLogInfo) { incrementCounter(function); logAndAudit((getThreadLocalIpAddress() == null ? "" : "source:" + getThreadLocalIpAddress() + " ") + function + extraLogInfo); com.codahale.metrics.Timer timer = Metrics.getOrCreateTimer(MetricsConstants.API_PREFIX + function); if (timer != null) { // Timer will be null if we aren't using metrics timerContexts.get().put(function, timer.time()); } Counter counter = Metrics.getOrCreateCounter(MetricsConstants.ACTIVE_CALLS + function); if (counter != null) { counter.inc(); } return function; } private String startFunction(String function) { return startFunction(function, ""); } private void startTableFunction(String function, String catName, String db, String tbl) { startFunction(function, " : tbl=" + TableName.getQualified(catName, db, tbl)); } private void startMultiTableFunction(String function, String db, List<String> tbls) { String tableNames = join(tbls, ","); startFunction(function, " : db=" + db + " tbls=" + tableNames); } private void startPartitionFunction(String function, String cat, String db, String tbl, List<String> partVals) { startFunction(function, " : tbl=" + TableName.getQualified(cat, db, tbl) + "[" + join(partVals, ",") + "]"); } private void startPartitionFunction(String function, String catName, String db, String tbl, Map<String, String> partName) { startFunction(function, " : tbl=" + TableName.getQualified(catName, db, tbl) + " partition=" + partName); } private void endFunction(String function, boolean successful, Exception e) { endFunction(function, successful, e, null); } private void endFunction(String function, boolean successful, Exception e, String inputTableName) { endFunction(function, new MetaStoreEndFunctionContext(successful, e, inputTableName)); } private void endFunction(String function, MetaStoreEndFunctionContext context) { com.codahale.metrics.Timer.Context timerContext = timerContexts.get().remove(function); if (timerContext != null) { long timeTaken = timerContext.stop(); LOG.debug((getThreadLocalIpAddress() == null ?
"" : "source:" + getThreadLocalIpAddress() + " ") + function + "time taken(ns): " + timeTaken); } Counter counter = Metrics.getOrCreateCounter(MetricsConstants.ACTIVE_CALLS + function); if (counter != null) { counter.dec(); } for (MetaStoreEndFunctionListener listener : endFunctionListeners) { listener.onEndFunction(function, context); } } @Override public fb_status getStatus() { return fb_status.ALIVE; } @Override public void shutdown() { cleanupRawStore(); PerfLogger.getPerfLogger(false).cleanupPerfLogMetrics(); } @Override public AbstractMap<String, Long> getCounters() { AbstractMap<String, Long> counters = super.getCounters(); // Allow endFunctionListeners to add any counters they have collected if (endFunctionListeners != null) { for (MetaStoreEndFunctionListener listener : endFunctionListeners) { listener.exportCounters(counters); } } return counters; } @Override public void create_catalog(CreateCatalogRequest rqst) throws AlreadyExistsException, InvalidObjectException, MetaException { Catalog catalog = rqst.getCatalog(); startFunction("create_catalog", ": " + catalog.toString()); boolean success = false; Exception ex = null; try { try { getMS().getCatalog(catalog.getName()); throw new AlreadyExistsException("Catalog " + catalog.getName() + " already exists"); } catch (NoSuchObjectException e) { // expected } if (!MetaStoreUtils.validateName(catalog.getName(), null)) { throw new InvalidObjectException(catalog.getName() + " is not a valid catalog name"); } if (catalog.getLocationUri() == null) { throw new InvalidObjectException("You must specify a path for the catalog"); } RawStore ms = getMS(); Path catPath = new Path(catalog.getLocationUri()); boolean madeDir = false; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { firePreEvent(new PreCreateCatalogEvent(this, catalog)); if (!wh.isDir(catPath)) { if (!wh.mkdirs(catPath)) { throw new MetaException("Unable to create catalog path " + catPath + ", failed to create catalog " + catalog.getName()); } madeDir = true; } // set the create time of catalog long time = System.currentTimeMillis() / 1000; catalog.setCreateTime((int) time); ms.openTransaction(); ms.createCatalog(catalog); // Create a default database inside the catalog Database db = new Database(DEFAULT_DATABASE_NAME, "Default database for catalog " + catalog.getName(), catalog.getLocationUri(), Collections.emptyMap()); db.setCatalogName(catalog.getName()); create_database_core(ms, db); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_CATALOG, new CreateCatalogEvent(true, this, catalog)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); if (madeDir) { wh.deleteDir(catPath, true, false, false); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_CATALOG, new CreateCatalogEvent(success, this, catalog), null, transactionalListenersResponses, ms); } } success = true; } catch (AlreadyExistsException|InvalidObjectException|MetaException e) { ex = e; throw e; } finally { endFunction("create_catalog", success, ex); } } @Override public void alter_catalog(AlterCatalogRequest rqst) throws TException { startFunction("alter_catalog " + rqst.getName()); boolean success = false; Exception ex = null; RawStore ms = getMS(); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); GetCatalogResponse oldCat = null; try { oldCat = get_catalog(new 
GetCatalogRequest(rqst.getName())); // Above should have thrown NoSuchObjectException if there is no such catalog assert oldCat != null && oldCat.getCatalog() != null; firePreEvent(new PreAlterCatalogEvent(oldCat.getCatalog(), rqst.getNewCat(), this)); ms.openTransaction(); ms.alterCatalog(rqst.getName(), rqst.getNewCat()); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_CATALOG, new AlterCatalogEvent(oldCat.getCatalog(), rqst.getNewCat(), true, this)); } success = ms.commitTransaction(); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } if ((null != oldCat) && (!listeners.isEmpty())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_CATALOG, new AlterCatalogEvent(oldCat.getCatalog(), rqst.getNewCat(), success, this), null, transactionalListenersResponses, ms); } endFunction("alter_catalog", success, ex); } } @Override public GetCatalogResponse get_catalog(GetCatalogRequest rqst) throws NoSuchObjectException, TException { String catName = rqst.getName(); startFunction("get_catalog", ": " + catName); Catalog cat = null; Exception ex = null; try { cat = getMS().getCatalog(catName); firePreEvent(new PreReadCatalogEvent(this, cat)); return new GetCatalogResponse(cat); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { endFunction("get_catalog", cat != null, ex); } } @Override public GetCatalogsResponse get_catalogs() throws MetaException { startFunction("get_catalogs"); List<String> ret = null; Exception ex = null; try { ret = getMS().getCatalogs(); } catch (Exception e) { ex = e; throw e; } finally { endFunction("get_catalogs", ret != null, ex); } return new GetCatalogsResponse(ret == null ?
Collections.emptyList() : ret); } @Override public void drop_catalog(DropCatalogRequest rqst) throws NoSuchObjectException, InvalidOperationException, MetaException { String catName = rqst.getName(); startFunction("drop_catalog", ": " + catName); if (DEFAULT_CATALOG_NAME.equalsIgnoreCase(catName)) { endFunction("drop_catalog", false, null); throw new MetaException("Cannot drop " + DEFAULT_CATALOG_NAME + " catalog"); } boolean success = false; Exception ex = null; try { dropCatalogCore(catName); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(NoSuchObjectException.class, InvalidOperationException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("drop_catalog", success, ex); } } private void dropCatalogCore(String catName) throws MetaException, NoSuchObjectException, InvalidOperationException { boolean success = false; Catalog cat = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); RawStore ms = getMS(); try { ms.openTransaction(); cat = ms.getCatalog(catName); firePreEvent(new PreDropCatalogEvent(this, cat)); List<String> allDbs = get_databases(prependNotNullCatToDbName(catName, null)); if (allDbs != null && !allDbs.isEmpty()) { // It might just be the default, in which case we can drop that one if it's empty if (allDbs.size() == 1 && allDbs.get(0).equals(DEFAULT_DATABASE_NAME)) { try { drop_database_core(ms, catName, DEFAULT_DATABASE_NAME, true, false); } catch (InvalidOperationException e) { // This means there are still tables or other objects in the database throw new InvalidOperationException("There are still objects in the default " + "database for catalog " + catName); } catch (InvalidObjectException|IOException|InvalidInputException e) { MetaException me = new MetaException("Error attempting to drop the default database for " + "catalog " + catName); me.initCause(e); throw me; } } else { throw new InvalidOperationException("There are non-default databases in the catalog " + catName + " so it cannot be dropped."); } } ms.dropCatalog(catName); if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_CATALOG, new DropCatalogEvent(true, this, cat)); } success = ms.commitTransaction(); } finally { if (success) { wh.deleteDir(wh.getDnsPath(new Path(cat.getLocationUri())), false, false, false); } else { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_CATALOG, new DropCatalogEvent(success, this, cat), null, transactionalListenerResponses, ms); } } } static boolean isDbReplicationTarget(Database db) { if (db.getParameters() == null) { return false; } if (!db.getParameters().containsKey(ReplConst.REPL_TARGET_DB_PROPERTY)) { return false; } return !db.getParameters().get(ReplConst.REPL_TARGET_DB_PROPERTY).trim().isEmpty(); } // Assumes that the catalog has already been set.
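// For illustration (hypothetical values): with a managed warehouse root of /warehouse/managed
// and an external root of /warehouse/external, a database named "sales" would resolve below to
// /warehouse/managed/sales.db and /warehouse/external/sales.db respectively, unless the caller
// passed explicit locationUri / managedLocationUri values.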
private void create_database_core(RawStore ms, final Database db) throws AlreadyExistsException, InvalidObjectException, MetaException { if (!MetaStoreUtils.validateName(db.getName(), conf)) { throw new InvalidObjectException(db.getName() + " is not a valid database name"); } Catalog cat = null; try { cat = getMS().getCatalog(db.getCatalogName()); } catch (NoSuchObjectException e) { LOG.error("No such catalog " + db.getCatalogName()); throw new InvalidObjectException("No such catalog " + db.getCatalogName()); } boolean skipAuthorization = false; String passedInURI = db.getLocationUri(); String passedInManagedURI = db.getManagedLocationUri(); if (passedInURI == null && passedInManagedURI == null) { skipAuthorization = true; } final Path defaultDbExtPath = wh.getDefaultDatabasePath(db.getName(), true); final Path defaultDbMgdPath = wh.getDefaultDatabasePath(db.getName(), false); final Path dbExtPath = (passedInURI != null) ? wh.getDnsPath(new Path(passedInURI)) : wh.determineDatabasePath(cat, db); final Path dbMgdPath = (passedInManagedURI != null) ? wh.getDnsPath(new Path(passedInManagedURI)) : null; if ((defaultDbExtPath.equals(dbExtPath) && defaultDbMgdPath.equals(dbMgdPath)) && ((dbMgdPath == null) || dbMgdPath.equals(defaultDbMgdPath))) { skipAuthorization = true; } if ( skipAuthorization ) { //null out to skip authorizer URI check db.setLocationUri(null); db.setManagedLocationUri(null); }else{ db.setLocationUri(dbExtPath.toString()); if (dbMgdPath != null) { db.setManagedLocationUri(dbMgdPath.toString()); } } if (db.getOwnerName() == null){ try { db.setOwnerName(SecurityUtils.getUGI().getShortUserName()); }catch (Exception e){ LOG.warn("Failed to get owner name for create database operation.", e); } } long time = System.currentTimeMillis()/1000; db.setCreateTime((int) time); boolean success = false; boolean madeManagedDir = false; boolean madeExternalDir = false; boolean isReplicated = isDbReplicationTarget(db); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { firePreEvent(new PreCreateDatabaseEvent(db, this)); //reinstate location uri for metastore db. if (skipAuthorization == true){ db.setLocationUri(dbExtPath.toString()); if (dbMgdPath != null) { db.setManagedLocationUri(dbMgdPath.toString()); } } if (db.getCatalogName() != null && !db.getCatalogName(). equals(Warehouse.DEFAULT_CATALOG_NAME)) { if (!wh.isDir(dbExtPath)) { LOG.debug("Creating database path " + dbExtPath); if (!wh.mkdirs(dbExtPath)) { throw new MetaException("Unable to create database path " + dbExtPath + ", failed to create database " + db.getName()); } madeExternalDir = true; } } else { if (dbMgdPath != null) { try { // Since this may be done as random user (if doAs=true) he may not have access // to the managed directory. 
We run this as an admin user madeManagedDir = UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws MetaException { if (!wh.isDir(dbMgdPath)) { LOG.info("Creating database path in managed directory " + dbMgdPath); if (!wh.mkdirs(dbMgdPath)) { throw new MetaException("Unable to create database managed path " + dbMgdPath + ", failed to create database " + db.getName()); } return true; } return false; } }); if (madeManagedDir) { LOG.info("Created database path in managed directory " + dbMgdPath); } else if (!isInTest || !isDbReplicationTarget(db)) { // Hive replication tests don't drop the db after each test throw new MetaException( "Unable to create database managed directory " + dbMgdPath + ", failed to create database " + db.getName()); } } catch (IOException | InterruptedException e) { throw new MetaException( "Unable to create database managed directory " + dbMgdPath + ", failed to create database " + db.getName() + ": " + e.getMessage()); } } if (dbExtPath != null) { try { madeExternalDir = UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws MetaException { if (!wh.isDir(dbExtPath)) { LOG.info("Creating database path in external directory " + dbExtPath); return wh.mkdirs(dbExtPath); } return false; } }); if (madeExternalDir) { LOG.info("Created database path in external directory " + dbExtPath); } else { LOG.warn("Failed to create external path " + dbExtPath + " for database " + db.getName() + ". This may result in access not being allowed if the " + "StorageBasedAuthorizationProvider is enabled"); } } catch (IOException | InterruptedException | UndeclaredThrowableException e) { throw new MetaException("Failed to create external path " + dbExtPath + " for database " + db.getName() + ". This may result in access not being allowed if the " + "StorageBasedAuthorizationProvider is enabled: " + e.getMessage()); } } else { LOG.info("Database external path won't be created since the external warehouse directory is not defined"); } } ms.openTransaction(); ms.createDatabase(db); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_DATABASE, new CreateDatabaseEvent(db, true, this, isReplicated)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); if (db.getCatalogName() != null && !db.getCatalogName().
equals(Warehouse.DEFAULT_CATALOG_NAME)) { if (madeManagedDir && dbMgdPath != null) { wh.deleteDir(dbMgdPath, true, db); } } else { if (madeManagedDir && dbMgdPath != null) { try { UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { wh.deleteDir(dbMgdPath, true, db); return null; } }); } catch (IOException | InterruptedException e) { LOG.error( "Couldn't delete managed directory " + dbMgdPath + " after " + "it was created for database " + db.getName() + " " + e.getMessage()); } } if (madeExternalDir && dbExtPath != null) { try { UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { wh.deleteDir(dbExtPath, true, db); return null; } }); } catch (IOException | InterruptedException e) { LOG.error("Couldn't delete external directory " + dbExtPath + " after " + "it was created for database " + db.getName() + " " + e.getMessage()); } } } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_DATABASE, new CreateDatabaseEvent(db, success, this, isReplicated), null, transactionalListenersResponses, ms); } } } @Override public void create_database(final Database db) throws AlreadyExistsException, InvalidObjectException, MetaException { startFunction("create_database", ": " + db.toString()); boolean success = false; Exception ex = null; if (!db.isSetCatalogName()) { db.setCatalogName(getDefaultCatalog(conf)); } try { try { if (null != get_database_core(db.getCatalogName(), db.getName())) { throw new AlreadyExistsException("Database " + db.getName() + " already exists"); } } catch (NoSuchObjectException e) { // expected } if (testTimeoutEnabled) { try { Thread.sleep(testTimeoutValue); } catch (InterruptedException e) { // do nothing } Deadline.checkTimeout(); } create_database_core(getMS(), db); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("create_database", success, ex); } } @Override public Database get_database(final String name) throws NoSuchObjectException, MetaException { GetDatabaseRequest request = new GetDatabaseRequest(); String[] parsedDbName = parseDbName(name, conf); request.setName(parsedDbName[DB_NAME]); if (parsedDbName[CAT_NAME] != null) { request.setCatalogName(parsedDbName[CAT_NAME]); } return get_database_req(request); } @Override public Database get_database_core(String catName, final String name) throws NoSuchObjectException, MetaException { Database db = null; if (name == null) { throw new MetaException("Database name cannot be null."); } try { db = getMS().getDatabase(catName, name); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class) .defaultRuntimeException(); } return db; } @Override public Database get_database_req(GetDatabaseRequest request) throws NoSuchObjectException, MetaException { startFunction("get_database", ": " + request.getName()); Database db = null; Exception ex = null; if (request.getName() == null) { throw new MetaException("Database name cannot be null."); } List<String> processorCapabilities = request.getProcessorCapabilities(); String processorId = request.getProcessorIdentifier(); try { db = getMS().getDatabase(request.getCatalogName(), request.getName()); firePreEvent(new PreReadDatabaseEvent(db, this)); if (transformer != null) { 
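// The transformer may rewrite the returned metadata based on the capabilities the client
// declared (processorCapabilities / processorId); the exact behavior depends on the
// configured IMetaStoreMetadataTransformer implementation.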
db = transformer.transformDatabase(db, processorCapabilities, processorId); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class) .defaultRuntimeException(); } finally { endFunction("get_database", db != null, ex); } return db; } @Override public void alter_database(final String dbName, final Database newDB) throws TException { startFunction("alter_database " + dbName); boolean success = false; Exception ex = null; RawStore ms = getMS(); Database oldDB = null; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); // Perform the same URI normalization as create_database_core. if (newDB.getLocationUri() != null) { newDB.setLocationUri(wh.getDnsPath(new Path(newDB.getLocationUri())).toString()); } String[] parsedDbName = parseDbName(dbName, conf); // We can replicate into an empty database, in which case newDB will indicate that it is a // target of replication while oldDB does not. The replication flow, however, will never alter // a database such that oldDB indicates it is a target of replication while newDB does not. So // relying solely on newDB to check whether the database is a target of replication works. boolean isReplicated = isDbReplicationTarget(newDB); try { oldDB = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); if (oldDB == null) { throw new MetaException("Could not alter database \"" + parsedDbName[DB_NAME] + "\". Could not retrieve old definition."); } firePreEvent(new PreAlterDatabaseEvent(oldDB, newDB, this)); ms.openTransaction(); ms.alterDatabase(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], newDB); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_DATABASE, new AlterDatabaseEvent(oldDB, newDB, true, this, isReplicated)); } success = ms.commitTransaction(); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } if ((null != oldDB) && (!listeners.isEmpty())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_DATABASE, new AlterDatabaseEvent(oldDB, newDB, success, this, isReplicated), null, transactionalListenersResponses, ms); } endFunction("alter_database", success, ex); } } private void drop_database_core(RawStore ms, String catName, final String name, final boolean deleteData, final boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException, IOException, InvalidObjectException, InvalidInputException { boolean success = false; Database db = null; List<Path> tablePaths = new ArrayList<>(); List<Path> partitionPaths = new ArrayList<>(); Map<String, String> transactionalListenerResponses = Collections.emptyMap(); if (name == null) { throw new MetaException("Database name cannot be null."); } boolean isReplicated = false; try { ms.openTransaction(); db = ms.getDatabase(catName, name); if (db.getType() == DatabaseType.REMOTE) { success = drop_remote_database_core(ms, db); return; } isReplicated = isDbReplicationTarget(db); if (!isInTest && ReplChangeManager.isSourceOfReplication(db)) { throw new InvalidOperationException("cannot drop a database which is a source of replication"); } firePreEvent(new PreDropDatabaseEvent(db, this)); String catPrependedName = MetaStoreUtils.prependCatalogToDbName(catName, name, conf); Set<String> uniqueTableNames = new HashSet<>(get_all_tables(catPrependedName)); List<String> allFunctions = get_functions(catPrependedName, "*");
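// Besides tables, a database can own functions, stored procedures and packages; collect them
// all up front so a non-cascade drop can be rejected below while the database is still intact.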
ListStoredProcedureRequest request = new ListStoredProcedureRequest(catName); request.setDbName(name); List<String> allProcedures = get_all_stored_procedures(request); ListPackageRequest pkgRequest = new ListPackageRequest(catName); pkgRequest.setDbName(name); List<String> allPackages = get_all_packages(pkgRequest); if (!cascade) { if (!uniqueTableNames.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more tables exist."); } if (!allFunctions.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more functions exist."); } if (!allProcedures.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more stored procedures exist."); } if (!allPackages.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more packages exist."); } } Path path = new Path(db.getLocationUri()).getParent(); if (!wh.isWritable(path)) { throw new MetaException("Database not dropped since its external warehouse location " + path + " is not writable by " + SecurityUtils.getUser()); } path = wh.getDatabaseManagedPath(db).getParent(); if (!wh.isWritable(path)) { throw new MetaException("Database not dropped since its managed warehouse location " + path + " is not writable by " + SecurityUtils.getUser()); } Path databasePath = wh.getDnsPath(wh.getDatabasePath(db)); // drop any functions before dropping db for (String funcName : allFunctions) { drop_function(catPrependedName, funcName); } for (String procName : allProcedures) { drop_stored_procedure(new StoredProcedureRequest(catName, name, procName)); } for (String pkgName : allPackages) { drop_package(new DropPackageRequest(catName, name, pkgName)); } final int tableBatchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_MAX); // First pass will drop the materialized views List<String> materializedViewNames = getTablesByTypeCore(catName, name, ".*", TableType.MATERIALIZED_VIEW.toString()); int startIndex = 0; // retrieve the tables from the metastore in batches to alleviate memory constraints while (startIndex < materializedViewNames.size()) { int endIndex = Math.min(startIndex + tableBatchSize, materializedViewNames.size()); List<Table> materializedViews; try { materializedViews = ms.getTableObjectsByName(catName, name, materializedViewNames.subList(startIndex, endIndex)); } catch (UnknownDBException e) { throw new MetaException(e.getMessage()); } if (materializedViews != null && !materializedViews.isEmpty()) { for (Table materializedView : materializedViews) { if (materializedView.getSd().getLocation() != null) { Path materializedViewPath = wh.getDnsPath(new Path(materializedView.getSd().getLocation())); if (!FileUtils.isSubdirectory(databasePath.toString(), materializedViewPath.toString())) { if (!wh.isWritable(materializedViewPath.getParent())) { throw new MetaException("Database metadata not deleted since table: " + materializedView.getTableName() + " has a parent location " + materializedViewPath.getParent() + " which is not writable by " + SecurityUtils.getUser()); } tablePaths.add(materializedViewPath); } } // Drop the materialized view but not its data drop_table(name, materializedView.getTableName(), false); // Remove from all tables uniqueTableNames.remove(materializedView.getTableName()); } } startIndex = endIndex; } // drop tables before dropping db List<String> allTables = new ArrayList<>(uniqueTableNames); startIndex = 0; // retrieve the tables from the 
metastore in batches to alleviate memory constraints while (startIndex < allTables.size()) { int endIndex = Math.min(startIndex + tableBatchSize, allTables.size()); List<Table> tables; try { tables = ms.getTableObjectsByName(catName, name, allTables.subList(startIndex, endIndex)); } catch (UnknownDBException e) { throw new MetaException(e.getMessage()); } if (tables != null && !tables.isEmpty()) { for (Table table : tables) { // If the table is not external and might not be in a subdirectory of the database, // add its location to the list of paths to delete Path tablePath = null; boolean tableDataShouldBeDeleted = checkTableDataShouldBeDeleted(table, deleteData); boolean isManagedTable = table.getTableType().equals(TableType.MANAGED_TABLE.toString()); if (table.getSd().getLocation() != null && tableDataShouldBeDeleted) { tablePath = wh.getDnsPath(new Path(table.getSd().getLocation())); if (!isManagedTable) { if (!wh.isWritable(tablePath.getParent())) { throw new MetaException( "Database metadata not deleted since table: " + table.getTableName() + " has a parent location " + tablePath.getParent() + " which is not writable by " + SecurityUtils.getUser()); } tablePaths.add(tablePath); } } // For each partition in each table, drop the partitions and get a list of // partitions' locations which might need to be deleted partitionPaths = dropPartitionsAndGetLocations(ms, catName, name, table.getTableName(), tablePath, tableDataShouldBeDeleted); // Drop the table but not its data drop_table_with_environment_context( MetaStoreUtils.prependCatalogToDbName(table.getCatName(), table.getDbName(), conf), table.getTableName(), false, null, false); } } startIndex = endIndex; } if (ms.dropDatabase(catName, name)) { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_DATABASE, new DropDatabaseEvent(db, true, this, isReplicated)); } success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } else if (deleteData) { // Delete the data in the partitions which have other locations deletePartitionData(partitionPaths, false, db); // Delete the data in the tables which have other locations for (Path tablePath : tablePaths) { deleteTableData(tablePath, false, db); } final Database dbFinal = db; final Path path = (dbFinal.getManagedLocationUri() != null) ?
new Path(dbFinal.getManagedLocationUri()) : wh.getDatabaseManagedPath(dbFinal); try { Boolean deleted = UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws IOException, MetaException { return wh.deleteDir(path, true, dbFinal); } }); if (!deleted) { LOG.error("Failed to delete database's managed warehouse directory: " + path); } } catch (Exception e) { LOG.error("Failed to delete database's managed warehouse directory: " + path + " " + e.getMessage()); } try { Boolean deleted = UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws MetaException { return wh.deleteDir(new Path(dbFinal.getLocationUri()), true, dbFinal); } }); if (!deleted) { LOG.error("Failed to delete database external warehouse directory " + db.getLocationUri()); } } catch (IOException | InterruptedException | UndeclaredThrowableException e) { LOG.error("Failed to delete the database external warehouse directory: " + db.getLocationUri() + " " + e .getMessage()); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_DATABASE, new DropDatabaseEvent(db, success, this, isReplicated), null, transactionalListenerResponses, ms); } } } private boolean drop_remote_database_core(RawStore ms, final Database db) throws MetaException, NoSuchObjectException { boolean success = false; firePreEvent(new PreDropDatabaseEvent(db, this)); if (ms.dropDatabase(db.getCatalogName(), db.getName())) { success = ms.commitTransaction(); } return success; } @Override public void drop_database(final String dbName, final boolean deleteData, final boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException { startFunction("drop_database", ": " + dbName); String[] parsedDbName = parseDbName(dbName, conf); if (DEFAULT_CATALOG_NAME.equalsIgnoreCase(parsedDbName[CAT_NAME]) && DEFAULT_DATABASE_NAME.equalsIgnoreCase(parsedDbName[DB_NAME])) { endFunction("drop_database", false, null); throw new MetaException("Can not drop " + DEFAULT_DATABASE_NAME + " database in catalog " + DEFAULT_CATALOG_NAME); } boolean success = false; Exception ex = null; try { drop_database_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], deleteData, cascade); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(NoSuchObjectException.class, InvalidOperationException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("drop_database", success, ex); } } @Override public List<String> get_databases(final String pattern) throws MetaException { startFunction("get_databases", ": " + pattern); String[] parsedDbNamed = parseDbName(pattern, conf); List<String> ret = null; Exception ex = null; try { if (parsedDbNamed[DB_NAME] == null) { ret = getMS().getAllDatabases(parsedDbNamed[CAT_NAME]); ret = FilterUtils.filterDbNamesIfEnabled(isServerFilterEnabled, filterHook, ret); } else { ret = getMS().getDatabases(parsedDbNamed[CAT_NAME], parsedDbNamed[DB_NAME]); ret = FilterUtils.filterDbNamesIfEnabled(isServerFilterEnabled, filterHook, ret); } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_databases", ret != null, ex); } return ret; } @Override public List<String> get_all_databases() throws MetaException { // get_databases filters results already. 
No need to filter here return get_databases(MetaStoreUtils.prependCatalogToDbName(null, null, conf)); } private void create_dataconnector_core(RawStore ms, final DataConnector connector) throws AlreadyExistsException, InvalidObjectException, MetaException { if (!MetaStoreUtils.validateName(connector.getName(), conf)) { throw new InvalidObjectException(connector.getName() + " is not a valid dataconnector name"); } if (connector.getOwnerName() == null) { try { connector.setOwnerName(SecurityUtils.getUGI().getShortUserName()); } catch (Exception e) { LOG.warn("Failed to get owner name for create dataconnector operation.", e); } } long time = System.currentTimeMillis() / 1000; connector.setCreateTime((int) time); boolean success = false; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { firePreEvent(new PreCreateDataConnectorEvent(connector, this)); ms.openTransaction(); ms.createDataConnector(connector); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_DATACONNECTOR, new CreateDataConnectorEvent(connector, true, this)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_DATACONNECTOR, new CreateDataConnectorEvent(connector, success, this), null, transactionalListenersResponses, ms); } } } @Override public void create_dataconnector(final DataConnector connector) throws AlreadyExistsException, InvalidObjectException, MetaException { startFunction("create_dataconnector", ": " + connector.toString()); boolean success = false; Exception ex = null; try { try { if (null != get_dataconnector_core(connector.getName())) { throw new AlreadyExistsException("DataConnector " + connector.getName() + " already exists"); } } catch (NoSuchObjectException e) { // expected } if (testTimeoutEnabled) { try { Thread.sleep(testTimeoutValue); } catch (InterruptedException e) { // do nothing } Deadline.checkTimeout(); } create_dataconnector_core(getMS(), connector); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("create_dataconnector", success, ex); } } @Override public DataConnector get_dataconnector_core(final String name) throws NoSuchObjectException, MetaException { DataConnector connector = null; if (name == null) { throw new MetaException("Data connector name cannot be null."); } try { connector = getMS().getDataConnector(name); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class) .defaultRuntimeException(); } return connector; } @Override public DataConnector get_dataconnector_req(GetDataConnectorRequest request) throws NoSuchObjectException, MetaException { startFunction("get_dataconnector", ": " + request.getConnectorName()); DataConnector connector = null; Exception ex = null; try { connector = get_dataconnector_core(request.getConnectorName()); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class) .defaultRuntimeException(); } finally { endFunction("get_dataconnector", connector != null, ex); } return connector; } @Override public void alter_dataconnector(final String dcName, final DataConnector newDC) throws TException {
startFunction("alter_dataconnector " + dcName); boolean success = false; Exception ex = null; RawStore ms = getMS(); DataConnector oldDC = null; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { oldDC = get_dataconnector_core(dcName); if (oldDC == null) { throw new MetaException("Could not alter dataconnector \"" + dcName + "\". Could not retrieve old definition."); } // firePreEvent(new PreAlterDatabaseEvent(oldDC, newDC, this)); ms.openTransaction(); ms.alterDataConnector(dcName, newDC); /* if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_DATACONNECTOR, new AlterDataConnectorEvent(oldDC, newDC, true, this)); } */ success = ms.commitTransaction(); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } /* if ((null != oldDC) && (!listeners.isEmpty())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_DATACONNECTOR, new AlterDataConnectorEvent(oldDC, newDC, success, this), null, transactionalListenersResponses, ms); } */ endFunction("alter_dataconnector", success, ex); } } @Override public List<String> get_dataconnectors() throws MetaException { startFunction("get_dataconnectors"); List<String> ret = null; Exception ex = null; try { ret = getMS().getAllDataConnectorNames(); ret = FilterUtils.filterDataConnectorsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_dataconnectors", ret != null, ex); } return ret; } @Override public void drop_dataconnector(final String dcName, boolean ifNotExists, boolean checkReferences) throws NoSuchObjectException, InvalidOperationException, MetaException { startFunction("drop_dataconnector", ": " + dcName); boolean success = false; DataConnector connector = null; Exception ex = null; RawStore ms = getMS(); try { ms.openTransaction(); connector = getMS().getDataConnector(dcName); if (connector == null) { if (!ifNotExists) { throw new NoSuchObjectException("DataConnector " + dcName + " doesn't exist"); } else { return; } } // TODO find DBs with references to this connector // if any existing references and checkReferences=true, do not drop // firePreEvent(new PreDropTableEvent(tbl, deleteData, this)); if (!ms.dropDataConnector(dcName)) { throw new MetaException("Unable to drop dataconnector " + dcName); } else { /* // TODO if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_TABLE, new DropTableEvent(tbl, true, deleteData, this, isReplicated), envContext); } */ success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } /* if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_TABLE, new DropTableEvent(tbl, success, deleteData, this, isReplicated), envContext, transactionalListenerResponses, ms); } */ endFunction("drop_dataconnector", success, ex); } } private void create_type_core(final RawStore ms, final Type type) throws AlreadyExistsException, MetaException, InvalidObjectException { if (!MetaStoreUtils.validateName(type.getName(), null)) { throw new InvalidObjectException("Invalid type name"); } boolean success = false; try { ms.openTransaction(); if (is_type_exists(ms, type.getName())) { throw new AlreadyExistsException("Type " + type.getName() + " already exists"); } ms.createType(type);
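// Note: success is only set by the commit below; if createType throws, the finally block
// rolls the transaction back.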
success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } } } @Override public boolean create_type(final Type type) throws AlreadyExistsException, MetaException, InvalidObjectException { startFunction("create_type", ": " + type.toString()); boolean success = false; Exception ex = null; try { create_type_core(getMS(), type); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("create_type", success, ex); } return success; } @Override public Type get_type(final String name) throws MetaException, NoSuchObjectException { startFunction("get_type", ": " + name); Type ret = null; Exception ex = null; try { ret = getMS().getType(name); if (null == ret) { throw new NoSuchObjectException("Type \"" + name + "\" not found."); } } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_type", ret != null, ex); } return ret; } private boolean is_type_exists(RawStore ms, String typeName) throws MetaException { return (ms.getType(typeName) != null); } @Override public boolean drop_type(final String name) throws MetaException, NoSuchObjectException { startFunction("drop_type", ": " + name); boolean success = false; Exception ex = null; try { // TODO:pc validate that there are no types that refer to this success = getMS().dropType(name); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("drop_type", success, ex); } return success; } @Override public Map<String, Type> get_type_all(String name) throws MetaException { // TODO Auto-generated method stub startFunction("get_type_all", ": " + name); endFunction("get_type_all", false, null); throw new MetaException("Not yet implemented"); } @Override public Table translate_table_dryrun(final Table tbl) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { Table transformedTbl = null; if (!tbl.isSetCatName()) { tbl.setCatName(getDefaultCatalog(conf)); } if (transformer != null) { transformedTbl = transformer.transformCreateTable(tbl, null, null); } return transformedTbl != null ? 
transformedTbl : tbl; } private void create_table_core(final RawStore ms, final Table tbl, final EnvironmentContext envContext) throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { CreateTableRequest req = new CreateTableRequest(tbl); req.setEnvContext(envContext); create_table_core(ms, req); } private void create_table_core(final RawStore ms, final Table tbl, final EnvironmentContext envContext, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints, List<String> processorCapabilities, String processorIdentifier) throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { CreateTableRequest req = new CreateTableRequest(tbl); if (envContext != null) { req.setEnvContext(envContext); } if (primaryKeys != null) { req.setPrimaryKeys(primaryKeys); } if (foreignKeys != null) { req.setForeignKeys(foreignKeys); } if (uniqueConstraints != null) { req.setUniqueConstraints(uniqueConstraints); } if (notNullConstraints != null) { req.setNotNullConstraints(notNullConstraints); } if (defaultConstraints != null) { req.setDefaultConstraints(defaultConstraints); } if (checkConstraints != null) { req.setCheckConstraints(checkConstraints); } if (processorCapabilities != null) { req.setProcessorCapabilities(processorCapabilities); req.setProcessorIdentifier(processorIdentifier); } create_table_core(ms, req); } private void create_table_core(final RawStore ms, final CreateTableRequest req) throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { ColumnStatistics colStats = null; Table tbl = req.getTable(); EnvironmentContext envContext = req.getEnvContext(); SQLAllTableConstraints constraints = new SQLAllTableConstraints(); constraints.setPrimaryKeys(req.getPrimaryKeys()); constraints.setForeignKeys(req.getForeignKeys()); constraints.setUniqueConstraints(req.getUniqueConstraints()); constraints.setDefaultConstraints(req.getDefaultConstraints()); constraints.setCheckConstraints(req.getCheckConstraints()); constraints.setNotNullConstraints(req.getNotNullConstraints()); List<String> processorCapabilities = req.getProcessorCapabilities(); String processorId = req.getProcessorIdentifier(); // To preserve backward compatibility throw MetaException in case of null database if (tbl.getDbName() == null) { throw new MetaException("Null database name is not allowed"); } if (!MetaStoreUtils.validateName(tbl.getTableName(), conf)) { throw new InvalidObjectException(tbl.getTableName() + " is not a valid object name"); } if (!tbl.isSetCatName()) { tbl.setCatName(getDefaultCatalog(conf)); } Database db = get_database_core(tbl.getCatName(), tbl.getDbName()); if (db != null && db.getType().equals(DatabaseType.REMOTE)) { // HIVE-24425: Create table in REMOTE db should fail throw new MetaException("Create table in REMOTE database " + db.getName() + " is not allowed"); } if (transformer != null) { tbl = transformer.transformCreateTable(tbl, processorCapabilities, processorId); } if (tbl.getParameters() != null) { tbl.getParameters().remove(TABLE_IS_CTAS); } // If the given table has column statistics, save it here. We will update it later. 
// We don't want it to be part of the Table object being created, lest the create table // event will also have the col stats which we don't want. if (tbl.isSetColStats()) { colStats = tbl.getColStats(); tbl.unsetColStats(); } String validate = MetaStoreServerUtils.validateTblColumns(tbl.getSd().getCols()); if (validate != null) { throw new InvalidObjectException("Invalid column " + validate); } if (tbl.getPartitionKeys() != null) { validate = MetaStoreServerUtils.validateTblColumns(tbl.getPartitionKeys()); if (validate != null) { throw new InvalidObjectException("Invalid partition column " + validate); } } if (tbl.isSetId()) { LOG.debug("Id shouldn't be set but table {}.{} has the Id set to {}. Id is ignored.", tbl.getDbName(), tbl.getTableName(), tbl.getId()); tbl.unsetId(); } SkewedInfo skew = tbl.getSd().getSkewedInfo(); if (skew != null) { validate = MetaStoreServerUtils.validateSkewedColNames(skew.getSkewedColNames()); if (validate != null) { throw new InvalidObjectException("Invalid skew column " + validate); } validate = MetaStoreServerUtils.validateSkewedColNamesSubsetCol( skew.getSkewedColNames(), tbl.getSd().getCols()); if (validate != null) { throw new InvalidObjectException("Invalid skew column " + validate); } } Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Path tblPath = null; boolean success = false, madeDir = false; boolean isReplicated = false; try { firePreEvent(new PreCreateTableEvent(tbl, this)); ms.openTransaction(); db = ms.getDatabase(tbl.getCatName(), tbl.getDbName()); isReplicated = isDbReplicationTarget(db); // get_table checks whether database exists, it should be moved here if (is_table_exists(ms, tbl.getCatName(), tbl.getDbName(), tbl.getTableName())) { throw new AlreadyExistsException("Table " + getCatalogQualifiedTableName(tbl) + " already exists"); } if (!TableType.VIRTUAL_VIEW.toString().equals(tbl.getTableType())) { if (tbl.getSd().getLocation() == null || tbl.getSd().getLocation().isEmpty()) { tblPath = wh.getDefaultTablePath(db, tbl); } else { if (!isExternal(tbl) && !MetaStoreUtils.isNonNativeTable(tbl)) { LOG.warn("Location: " + tbl.getSd().getLocation() + " specified for non-external table:" + tbl.getTableName()); } tblPath = wh.getDnsPath(new Path(tbl.getSd().getLocation())); } tbl.getSd().setLocation(tblPath.toString()); } if (tblPath != null) { if (!wh.isDir(tblPath)) { if (!wh.mkdirs(tblPath)) { throw new MetaException(tblPath + " is not a directory or unable to create one"); } madeDir = true; } } if (MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) && !MetaStoreUtils.isView(tbl)) { MetaStoreServerUtils.updateTableStatsSlow(db, tbl, wh, madeDir, false, envContext); } // set create time long time = System.currentTimeMillis() / 1000; tbl.setCreateTime((int) time); if (tbl.getParameters() == null || tbl.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) { tbl.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time)); } if (CollectionUtils.isEmpty(constraints.getPrimaryKeys()) && CollectionUtils.isEmpty(constraints.getForeignKeys()) && CollectionUtils.isEmpty(constraints.getUniqueConstraints())&& CollectionUtils.isEmpty(constraints.getNotNullConstraints())&& CollectionUtils.isEmpty(constraints.getDefaultConstraints()) && CollectionUtils.isEmpty(constraints.getCheckConstraints())) { ms.createTable(tbl); } else { final String catName = tbl.getCatName(); // Check that constraints have catalog name properly set first if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys()) && 
!constraints.getPrimaryKeys().get(0).isSetCatName()) { constraints.getPrimaryKeys().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getForeignKeys()) && !constraints.getForeignKeys().get(0).isSetCatName()) { constraints.getForeignKeys().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints()) && !constraints.getUniqueConstraints().get(0).isSetCatName()) { constraints.getUniqueConstraints().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints()) && !constraints.getNotNullConstraints().get(0).isSetCatName()) { constraints.getNotNullConstraints().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints()) && !constraints.getDefaultConstraints().get(0).isSetCatName()) { constraints.getDefaultConstraints().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints()) && !constraints.getCheckConstraints().get(0).isSetCatName()) { constraints.getCheckConstraints().forEach(constraint -> constraint.setCatName(catName)); } // Set constraint name if null before sending to listener constraints = ms.createTableWithConstraints(tbl, constraints); } if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_TABLE, new CreateTableEvent(tbl, true, this, isReplicated), envContext); if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PRIMARYKEY, new AddPrimaryKeyEvent(constraints.getPrimaryKeys(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getForeignKeys())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_FOREIGNKEY, new AddForeignKeyEvent(constraints.getForeignKeys(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_UNIQUECONSTRAINT, new AddUniqueConstraintEvent(constraints.getUniqueConstraints(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_NOTNULLCONSTRAINT, new AddNotNullConstraintEvent(constraints.getNotNullConstraints(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_CHECKCONSTRAINT, new AddCheckConstraintEvent(constraints.getCheckConstraints(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_DEFAULTCONSTRAINT, new AddDefaultConstraintEvent(constraints.getDefaultConstraints(), true, this), envContext); } } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); if (madeDir) { wh.deleteDir(tblPath, true, false, ReplChangeManager.shouldEnableCm(db, tbl)); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_TABLE, new CreateTableEvent(tbl, success, this, isReplicated), envContext, transactionalListenerResponses, ms); if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys())) { 
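          // Unlike the transactional notifications above, which always pass true
          // because they fire while the transaction is still open, these post-commit
          // notifications carry the final `success` flag so listeners can tell
          // whether the DDL actually took effect.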
MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PRIMARYKEY, new AddPrimaryKeyEvent(constraints.getPrimaryKeys(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getForeignKeys())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_FOREIGNKEY, new AddForeignKeyEvent(constraints.getForeignKeys(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_UNIQUECONSTRAINT, new AddUniqueConstraintEvent(constraints.getUniqueConstraints(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_NOTNULLCONSTRAINT, new AddNotNullConstraintEvent(constraints.getNotNullConstraints(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_DEFAULTCONSTRAINT, new AddDefaultConstraintEvent(constraints.getDefaultConstraints(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_CHECKCONSTRAINT, new AddCheckConstraintEvent(constraints.getCheckConstraints(), success, this), envContext); } } } // If the table has column statistics, update it into the metastore. We need a valid // writeId list to update column statistics for a transactional table. But during bootstrap // replication, where we use this feature, we do not have a valid writeId list which was // used to update the stats. But we know for sure that the writeId associated with the // stats was valid then (otherwise stats update would have failed on the source). So, craft // a valid transaction list with only that writeId and use it to update the stats. 
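 // Illustrative sketch with hypothetical values: for a table "db1.t1" created
 // with writeId 5, the ValidReaderWriteIdList built below has a high-water mark
 // of 5 and no open or aborted writeIds, so writeId 5 is treated as valid when
 // the column statistics are persisted.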
if (colStats != null) { long writeId = tbl.getWriteId(); String validWriteIds = null; if (writeId > 0) { ValidWriteIdList validWriteIdList = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(), tbl.getTableName()), new long[0], new BitSet(), writeId); validWriteIds = validWriteIdList.toString(); } updateTableColumnStatsInternal(colStats, validWriteIds, tbl.getWriteId()); } } @Override public void create_table(final Table tbl) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { create_table_with_environment_context(tbl, null); } @Override public void create_table_with_environment_context(final Table tbl, final EnvironmentContext envContext) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { startFunction("create_table", ": " + tbl.toString()); boolean success = false; Exception ex = null; try { create_table_core(getMS(), tbl, envContext); success = true; } catch (Exception e) { LOG.warn("create_table_with_environment_context got ", e); ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .throwIfInstance(AlreadyExistsException.class, InvalidInputException.class) .convertIfInstance(NoSuchObjectException.class, InvalidObjectException.class) .defaultMetaException(); } finally { endFunction("create_table", success, ex, tbl.getTableName()); } } @Override public void create_table_req(final CreateTableRequest req) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { Table tbl = req.getTable(); startFunction("create_table_req", ": " + tbl.toString()); boolean success = false; Exception ex = null; try { create_table_core(getMS(), req); success = true; } catch (Exception e) { LOG.warn("create_table_req got ", e); ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .throwIfInstance(AlreadyExistsException.class, InvalidInputException.class) .convertIfInstance(NoSuchObjectException.class, InvalidObjectException.class) .defaultMetaException(); } finally { endFunction("create_table_req", success, ex, tbl.getTableName()); } } @Override public void create_table_with_constraints(final Table tbl, final List<SQLPrimaryKey> primaryKeys, final List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { startFunction("create_table", ": " + tbl.toString()); boolean success = false; Exception ex = null; try { CreateTableRequest req = new CreateTableRequest(tbl); req.setPrimaryKeys(primaryKeys); req.setForeignKeys(foreignKeys); req.setUniqueConstraints(uniqueConstraints); req.setNotNullConstraints(notNullConstraints); req.setDefaultConstraints(defaultConstraints); req.setCheckConstraints(checkConstraints); create_table_req(req); success = true; } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .throwIfInstance(AlreadyExistsException.class, InvalidInputException.class) .defaultMetaException(); } finally { endFunction("create_table_with_constraints", success, ex, tbl.getTableName()); } } @Override public void drop_constraint(DropConstraintRequest req) throws MetaException, InvalidObjectException { String catName = req.isSetCatName() ? 
req.getCatName() : getDefaultCatalog(conf); String dbName = req.getDbname(); String tableName = req.getTablename(); String constraintName = req.getConstraintname(); startFunction("drop_constraint", ": " + constraintName); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { ms.openTransaction(); ms.dropConstraint(catName, dbName, tableName, constraintName); if (transactionalListeners.size() > 0) { DropConstraintEvent dropConstraintEvent = new DropConstraintEvent(catName, dbName, tableName, constraintName, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onDropConstraint(dropConstraintEvent); } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class) .convertIfInstance(NoSuchObjectException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else { for (MetaStoreEventListener listener : listeners) { DropConstraintEvent dropConstraintEvent = new DropConstraintEvent(catName, dbName, tableName, constraintName, true, this); listener.onDropConstraint(dropConstraintEvent); } } endFunction("drop_constraint", success, ex, constraintName); } } @Override public void add_primary_key(AddPrimaryKeyRequest req) throws MetaException, InvalidObjectException { List<SQLPrimaryKey> primaryKeyCols = req.getPrimaryKeyCols(); String constraintName = (CollectionUtils.isNotEmpty(primaryKeyCols)) ? primaryKeyCols.get(0).getPk_name() : "null"; startFunction("add_primary_key", ": " + constraintName); boolean success = false; Exception ex = null; if (CollectionUtils.isNotEmpty(primaryKeyCols) && !primaryKeyCols.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); primaryKeyCols.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); List<SQLPrimaryKey> primaryKeys = ms.addPrimaryKeys(primaryKeyCols); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(primaryKeys)) { AddPrimaryKeyEvent addPrimaryKeyEvent = new AddPrimaryKeyEvent(primaryKeys, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddPrimaryKey(addPrimaryKeyEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (primaryKeyCols != null && primaryKeyCols.size() > 0) { for (MetaStoreEventListener listener : listeners) { AddPrimaryKeyEvent addPrimaryKeyEvent = new AddPrimaryKeyEvent(primaryKeyCols, true, this); listener.onAddPrimaryKey(addPrimaryKeyEvent); } } endFunction("add_primary_key", success, ex, constraintName); } } @Override public void add_foreign_key(AddForeignKeyRequest req) throws MetaException, InvalidObjectException { List<SQLForeignKey> foreignKeys = req.getForeignKeyCols(); String constraintName = CollectionUtils.isNotEmpty(foreignKeys) ? 
foreignKeys.get(0).getFk_name() : "null"; startFunction("add_foreign_key", ": " + constraintName); boolean success = false; Exception ex = null; if (CollectionUtils.isNotEmpty(foreignKeys) && !foreignKeys.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); foreignKeys.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); foreignKeys = ms.addForeignKeys(foreignKeys); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(foreignKeys)) { AddForeignKeyEvent addForeignKeyEvent = new AddForeignKeyEvent(foreignKeys, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddForeignKey(addForeignKeyEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(foreignKeys)) { for (MetaStoreEventListener listener : listeners) { AddForeignKeyEvent addForeignKeyEvent = new AddForeignKeyEvent(foreignKeys, true, this); listener.onAddForeignKey(addForeignKeyEvent); } } endFunction("add_foreign_key", success, ex, constraintName); } } @Override public void add_unique_constraint(AddUniqueConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLUniqueConstraint> uniqueConstraints = req.getUniqueConstraintCols(); String constraintName = (uniqueConstraints != null && uniqueConstraints.size() > 0) ? uniqueConstraints.get(0).getUk_name() : "null"; startFunction("add_unique_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!uniqueConstraints.isEmpty() && !uniqueConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); uniqueConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); uniqueConstraints = ms.addUniqueConstraints(uniqueConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(uniqueConstraints)) { AddUniqueConstraintEvent addUniqueConstraintEvent = new AddUniqueConstraintEvent(uniqueConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddUniqueConstraint(addUniqueConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(uniqueConstraints)) { for (MetaStoreEventListener listener : listeners) { AddUniqueConstraintEvent addUniqueConstraintEvent = new AddUniqueConstraintEvent(uniqueConstraints, true, this); listener.onAddUniqueConstraint(addUniqueConstraintEvent); } } endFunction("add_unique_constraint", success, ex, constraintName); } } @Override public void add_not_null_constraint(AddNotNullConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLNotNullConstraint> notNullConstraints = req.getNotNullConstraintCols(); String constraintName = (notNullConstraints != null && notNullConstraints.size() > 0) ? 
notNullConstraints.get(0).getNn_name() : "null"; startFunction("add_not_null_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!notNullConstraints.isEmpty() && !notNullConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); notNullConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); notNullConstraints = ms.addNotNullConstraints(notNullConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(notNullConstraints)) { AddNotNullConstraintEvent addNotNullConstraintEvent = new AddNotNullConstraintEvent(notNullConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddNotNullConstraint(addNotNullConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class).defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(notNullConstraints)) { for (MetaStoreEventListener listener : listeners) { AddNotNullConstraintEvent addNotNullConstraintEvent = new AddNotNullConstraintEvent(notNullConstraints, true, this); listener.onAddNotNullConstraint(addNotNullConstraintEvent); } } endFunction("add_not_null_constraint", success, ex, constraintName); } } @Override public void add_default_constraint(AddDefaultConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLDefaultConstraint> defaultConstraints = req.getDefaultConstraintCols(); String constraintName = CollectionUtils.isNotEmpty(defaultConstraints) ? defaultConstraints.get(0).getDc_name() : "null"; startFunction("add_default_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!defaultConstraints.isEmpty() && !defaultConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); defaultConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); defaultConstraints = ms.addDefaultConstraints(defaultConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(defaultConstraints)) { AddDefaultConstraintEvent addDefaultConstraintEvent = new AddDefaultConstraintEvent(defaultConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddDefaultConstraint(addDefaultConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class).defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(defaultConstraints)) { for (MetaStoreEventListener listener : listeners) { AddDefaultConstraintEvent addDefaultConstraintEvent = new AddDefaultConstraintEvent(defaultConstraints, true, this); listener.onAddDefaultConstraint(addDefaultConstraintEvent); } } endFunction("add_default_constraint", success, ex, constraintName); } } @Override public void add_check_constraint(AddCheckConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLCheckConstraint> checkConstraints= req.getCheckConstraintCols(); String constraintName = CollectionUtils.isNotEmpty(checkConstraints) ? 
checkConstraints.get(0).getDc_name() : "null"; startFunction("add_check_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!checkConstraints.isEmpty() && !checkConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); checkConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); checkConstraints = ms.addCheckConstraints(checkConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(checkConstraints)) { AddCheckConstraintEvent addcheckConstraintEvent = new AddCheckConstraintEvent(checkConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddCheckConstraint(addcheckConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class).defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(checkConstraints)) { for (MetaStoreEventListener listener : listeners) { AddCheckConstraintEvent addCheckConstraintEvent = new AddCheckConstraintEvent(checkConstraints, true, this); listener.onAddCheckConstraint(addCheckConstraintEvent); } } endFunction("add_check_constraint", success, ex, constraintName); } } private boolean is_table_exists(RawStore ms, String catName, String dbname, String name) throws MetaException { return (ms.getTable(catName, dbname, name, null) != null); } private boolean drop_table_core(final RawStore ms, final String catName, final String dbname, final String name, final boolean deleteData, final EnvironmentContext envContext, final String indexName, boolean dropPartitions) throws NoSuchObjectException, MetaException, IOException, InvalidObjectException, InvalidInputException { boolean success = false; boolean tableDataShouldBeDeleted = false; Path tblPath = null; List<Path> partPaths = null; Table tbl = null; boolean ifPurge = false; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; boolean isReplicated = false; try { ms.openTransaction(); // HIVE-25282: Drop/Alter table in REMOTE db should fail db = ms.getDatabase(catName, dbname); if (db.getType() == DatabaseType.REMOTE) { throw new MetaException("Drop table in REMOTE database " + db.getName() + " is not allowed"); } isReplicated = isDbReplicationTarget(db); // drop any partitions GetTableRequest req = new GetTableRequest(dbname,name); req.setCatName(catName); tbl = get_table_core(req); if (tbl == null) { throw new NoSuchObjectException(name + " doesn't exist"); } // Check if table is part of a materialized view. // If it is, it cannot be dropped. List<String> isPartOfMV = ms.isPartOfMaterializedView(catName, dbname, name); if (!isPartOfMV.isEmpty()) { throw new MetaException(String.format("Cannot drop table as it is used in the following materialized" + " views %s%n", isPartOfMV)); } if (tbl.getSd() == null) { throw new MetaException("Table metadata is corrupted"); } ifPurge = isMustPurge(envContext, tbl); firePreEvent(new PreDropTableEvent(tbl, deleteData, this)); tableDataShouldBeDeleted = checkTableDataShouldBeDeleted(tbl, deleteData); if (tbl.getSd().getLocation() != null) { tblPath = new Path(tbl.getSd().getLocation()); if (!wh.isWritable(tblPath.getParent())) { String target = indexName == null ? 
"Table" : "Index table"; throw new MetaException(target + " metadata not deleted since " + tblPath.getParent() + " is not writable by " + SecurityUtils.getUser()); } } // Drop the partitions and get a list of locations which need to be deleted // In case of drop database cascade we need not to drop the partitions, they are already dropped. if (dropPartitions) { partPaths = dropPartitionsAndGetLocations(ms, catName, dbname, name, tblPath, tableDataShouldBeDeleted); } // Drop any constraints on the table ms.dropConstraint(catName, dbname, name, null, true); if (!ms.dropTable(catName, dbname, name)) { String tableName = TableName.getQualified(catName, dbname, name); throw new MetaException(indexName == null ? "Unable to drop table " + tableName: "Unable to drop index table " + tableName + " for index " + indexName); } else { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_TABLE, new DropTableEvent(tbl, true, deleteData, this, isReplicated), envContext); } success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } else if (tableDataShouldBeDeleted) { // Data needs deletion. Check if trash may be skipped. // Delete the data in the partitions which have other locations deletePartitionData(partPaths, ifPurge, ReplChangeManager.shouldEnableCm(db, tbl)); // Delete the data in the table deleteTableData(tblPath, ifPurge, ReplChangeManager.shouldEnableCm(db, tbl)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_TABLE, new DropTableEvent(tbl, success, deleteData, this, isReplicated), envContext, transactionalListenerResponses, ms); } } return success; } private boolean checkTableDataShouldBeDeleted(Table tbl, boolean deleteData) { if (deleteData && isExternal(tbl)) { // External table data can be deleted if EXTERNAL_TABLE_PURGE is true return isExternalTablePurge(tbl); } return deleteData; } /** * Deletes the data in a table's location, if it fails logs an error * * @param tablePath * @param ifPurge completely purge the table (skipping trash) while removing * data from warehouse * @param shouldEnableCm If cm should be enabled */ private void deleteTableData(Path tablePath, boolean ifPurge, boolean shouldEnableCm) { if (tablePath != null) { deleteDataExcludeCmroot(tablePath, ifPurge, shouldEnableCm); } } /** * Deletes the data in a table's location, if it fails logs an error. * * @param tablePath * @param ifPurge completely purge the table (skipping trash) while removing * data from warehouse * @param db Database */ private void deleteTableData(Path tablePath, boolean ifPurge, Database db) { if (tablePath != null) { try { wh.deleteDir(tablePath, true, ifPurge, db); } catch (Exception e) { LOG.error("Failed to delete table directory: " + tablePath + " " + e.getMessage()); } } } /** * Give a list of partitions' locations, tries to delete each one * and for each that fails logs an error. * * @param partPaths * @param ifPurge completely purge the partition (skipping trash) while * removing data from warehouse * @param shouldEnableCm If cm should be enabled */ private void deletePartitionData(List<Path> partPaths, boolean ifPurge, boolean shouldEnableCm) { if (partPaths != null && !partPaths.isEmpty()) { for (Path partPath : partPaths) { deleteDataExcludeCmroot(partPath, ifPurge, shouldEnableCm); } } } /** * Give a list of partitions' locations, tries to delete each one * and for each that fails logs an error. 
* * @param partPaths * @param ifPurge completely purge the partition (skipping trash) while * removing data from warehouse * @param db Database */ private void deletePartitionData(List<Path> partPaths, boolean ifPurge, Database db) { if (partPaths != null && !partPaths.isEmpty()) { for (Path partPath : partPaths) { try { wh.deleteDir(partPath, true, ifPurge, db); } catch (Exception e) { LOG.error("Failed to delete partition directory: " + partPath + " " + e.getMessage()); } } } } /** * Delete data from path excluding cmdir * and for each that fails logs an error. * * @param path * @param ifPurge completely purge the partition (skipping trash) while * removing data from warehouse * @param shouldEnableCm If cm should be enabled */ private void deleteDataExcludeCmroot(Path path, boolean ifPurge, boolean shouldEnableCm) { try { if (shouldEnableCm) { //Don't delete cmdir if its inside the partition path FileStatus[] statuses = path.getFileSystem(conf).listStatus(path, ReplChangeManager.CMROOT_PATH_FILTER); for (final FileStatus status : statuses) { wh.deleteDir(status.getPath(), true, ifPurge, shouldEnableCm); } //Check if table directory is empty, delete it FileStatus[] statusWithoutFilter = path.getFileSystem(conf).listStatus(path); if (statusWithoutFilter.length == 0) { wh.deleteDir(path, true, ifPurge, shouldEnableCm); } } else { //If no cm delete the complete table directory wh.deleteDir(path, true, ifPurge, shouldEnableCm); } } catch (Exception e) { LOG.error("Failed to delete directory: " + path + " " + e.getMessage()); } } /** * Deletes the partitions specified by catName, dbName, tableName. If checkLocation is true, for * locations of partitions which may not be subdirectories of tablePath checks to make sure the * locations are writable. * * Drops the metadata for each partition. * * Provides a list of locations of partitions which may not be subdirectories of tablePath. * * @param ms RawStore to use for metadata retrieval and delete * @param catName The catName * @param dbName The dbName * @param tableName The tableName * @param tablePath The tablePath of which subdirectories does not have to be checked * @param checkLocation Should we check the locations at all * @return The list of the Path objects to delete (only in case checkLocation is true) * @throws MetaException * @throws IOException * @throws NoSuchObjectException */ private List<Path> dropPartitionsAndGetLocations(RawStore ms, String catName, String dbName, String tableName, Path tablePath, boolean checkLocation) throws MetaException, IOException, NoSuchObjectException { int batchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_OBJECTS_MAX); String tableDnsPath = null; if (tablePath != null) { tableDnsPath = wh.getDnsPath(tablePath).toString(); } List<Path> partPaths = new ArrayList<>(); while (true) { Map<String, String> partitionLocations = ms.getPartitionLocations(catName, dbName, tableName, tableDnsPath, batchSize); if (partitionLocations == null || partitionLocations.isEmpty()) { // No more partitions left to drop. Return with the collected path list to delete. return partPaths; } if (checkLocation) { for (String partName : partitionLocations.keySet()) { String pathString = partitionLocations.get(partName); if (pathString != null) { Path partPath = wh.getDnsPath(new Path(pathString)); // Double check here. 
            // Maybe Warehouse.getDnsPath revealed a relationship between the path objects.
            if (tableDnsPath == null ||
                !FileUtils.isSubdirectory(tableDnsPath, partPath.toString())) {
              if (!wh.isWritable(partPath.getParent())) {
                throw new MetaException("Table metadata not deleted since the partition "
                    + partName + " has parent location " + partPath.getParent()
                    + " which is not writable by " + SecurityUtils.getUser());
              }
              partPaths.add(partPath);
            }
          }
        }
      }
      for (MetaStoreEventListener listener : listeners) {
        // No drop part listener events fired for public listeners historically, for drop table case.
        // Limiting to internal listeners for now, to avoid unexpected calls for public listeners.
        if (listener instanceof HMSMetricsListener) {
          for (@SuppressWarnings("unused") String partName : partitionLocations.keySet()) {
            listener.onDropPartition(null);
          }
        }
      }
      ms.dropPartitions(catName, dbName, tableName, new ArrayList<>(partitionLocations.keySet()));
    }
  }

  @Override
  public void drop_table(final String dbname, final String name, final boolean deleteData)
      throws NoSuchObjectException, MetaException {
    drop_table_with_environment_context(dbname, name, deleteData, null);
  }

  @Override
  public void drop_table_with_environment_context(final String dbname, final String name,
      final boolean deleteData, final EnvironmentContext envContext)
      throws NoSuchObjectException, MetaException {
    drop_table_with_environment_context(dbname, name, deleteData, envContext, true);
  }

  private void drop_table_with_environment_context(final String dbname, final String name,
      final boolean deleteData, final EnvironmentContext envContext, boolean dropPartitions)
      throws MetaException {
    String[] parsedDbName = parseDbName(dbname, conf);
    startTableFunction("drop_table", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name);
    boolean success = false;
    Exception ex = null;
    try {
      success = drop_table_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name,
          deleteData, envContext, null, dropPartitions);
    } catch (Exception e) {
      ex = e;
      throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class)
          .convertIfInstance(IOException.class, MetaException.class).defaultMetaException();
    } finally {
      endFunction("drop_table", success, ex, name);
    }
  }

  private void updateStatsForTruncate(Map<String, String> props, EnvironmentContext environmentContext) {
    if (null == props) {
      return;
    }
    for (String stat : StatsSetupConst.SUPPORTED_STATS) {
      String statVal = props.get(stat);
      if (statVal != null) {
        // In the case of truncate table, we set the stats to be 0.
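        // SUPPORTED_STATS covers the basic stats such as numRows and totalSize;
        // only keys already present in the parameters are overwritten here,
        // nothing new is added.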
props.put(stat, "0"); } } //first set basic stats to true StatsSetupConst.setBasicStatsState(props, StatsSetupConst.TRUE); environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK); environmentContext.putToProperties(StatsSetupConst.DO_NOT_POPULATE_QUICK_STATS, StatsSetupConst.TRUE); //then invalidate column stats StatsSetupConst.clearColumnStatsState(props); return; } private void alterPartitionForTruncate(RawStore ms, String catName, String dbName, String tableName, Table table, Partition partition, String validWriteIds, long writeId) throws Exception { EnvironmentContext environmentContext = new EnvironmentContext(); updateStatsForTruncate(partition.getParameters(), environmentContext); if (!transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(partition, partition, table, true, true, writeId, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(partition, partition, table, true, true, writeId, this)); } if (writeId > 0) { partition.setWriteId(writeId); } alterHandler.alterPartition(ms, wh, catName, dbName, tableName, null, partition, environmentContext, this, validWriteIds); } private void alterTableStatsForTruncate(RawStore ms, String catName, String dbName, String tableName, Table table, List<String> partNames, String validWriteIds, long writeId) throws Exception { if (partNames == null) { if (0 != table.getPartitionKeysSize()) { for (Partition partition : ms.getPartitions(catName, dbName, tableName, -1)) { alterPartitionForTruncate(ms, catName, dbName, tableName, table, partition, validWriteIds, writeId); } } else { EnvironmentContext environmentContext = new EnvironmentContext(); updateStatsForTruncate(table.getParameters(), environmentContext); boolean isReplicated = isDbReplicationTarget(ms.getDatabase(catName, dbName)); if (!transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_TABLE, new AlterTableEvent(table, table, true, true, writeId, this, isReplicated)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_TABLE, new AlterTableEvent(table, table, true, true, writeId, this, isReplicated)); } // TODO: this should actually pass thru and set writeId for txn stats. 
if (writeId > 0) { table.setWriteId(writeId); } alterHandler.alterTable(ms, wh, catName, dbName, tableName, table, environmentContext, this, validWriteIds); } } else { for (Partition partition : ms.getPartitionsByNames(catName, dbName, tableName, partNames)) { alterPartitionForTruncate(ms, catName, dbName, tableName, table, partition, validWriteIds, writeId); } } return; } private List<Path> getLocationsForTruncate(final RawStore ms, final String catName, final String dbName, final String tableName, final Table table, final List<String> partNames) throws Exception { List<Path> locations = new ArrayList<>(); if (partNames == null) { if (0 != table.getPartitionKeysSize()) { for (Partition partition : ms.getPartitions(catName, dbName, tableName, -1)) { locations.add(new Path(partition.getSd().getLocation())); } } else { locations.add(new Path(table.getSd().getLocation())); } } else { for (Partition partition : ms.getPartitionsByNames(catName, dbName, tableName, partNames)) { locations.add(new Path(partition.getSd().getLocation())); } } return locations; } @Override public CmRecycleResponse cm_recycle(final CmRecycleRequest request) throws MetaException { wh.recycleDirToCmPath(new Path(request.getDataPath()), request.isPurge()); return new CmRecycleResponse(); } @Override public void truncate_table(final String dbName, final String tableName, List<String> partNames) throws NoSuchObjectException, MetaException { // Deprecated path, won't work for txn tables. truncateTableInternal(dbName, tableName, partNames, null, -1, null); } @Override public TruncateTableResponse truncate_table_req(TruncateTableRequest req) throws MetaException, TException { truncateTableInternal(req.getDbName(), req.getTableName(), req.getPartNames(), req.getValidWriteIdList(), req.getWriteId(), req.getEnvironmentContext()); return new TruncateTableResponse(); } private void truncateTableInternal(String dbName, String tableName, List<String> partNames, String validWriteIds, long writeId, EnvironmentContext context) throws MetaException, NoSuchObjectException { boolean isSkipTrash = false, needCmRecycle = false; try { String[] parsedDbName = parseDbName(dbName, conf); Table tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); boolean skipDataDeletion = Optional.ofNullable(context) .map(EnvironmentContext::getProperties) .map(prop -> prop.get(TRUNCATE_SKIP_DATA_DELETION)) .map(Boolean::parseBoolean) .orElse(false); if (!skipDataDeletion) { boolean truncateFiles = !TxnUtils.isTransactionalTable(tbl) || !MetastoreConf.getBoolVar(getConf(), MetastoreConf.ConfVars.TRUNCATE_ACID_USE_BASE); if (truncateFiles) { isSkipTrash = MetaStoreUtils.isSkipTrash(tbl.getParameters()); Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); needCmRecycle = ReplChangeManager.shouldEnableCm(db, tbl); } // This is not transactional for (Path location : getLocationsForTruncate(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, tbl, partNames)) { FileSystem fs = location.getFileSystem(getConf()); if (truncateFiles) { truncateDataFiles(location, fs, isSkipTrash, needCmRecycle); } else { // For Acid tables we don't need to delete the old files, only write an empty baseDir. 
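 // An empty base_<writeId> directory supersedes the earlier deltas/bases for
 // readers; the TRUNCATED marker written by addTruncateBaseFile keeps AcidUtils
 // from skipping the otherwise empty directory.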
            // Compaction and cleaner will take care of the rest
            addTruncateBaseFile(location, writeId, fs);
          }
        }
      }
      // Alter the table/partition stats and also notify truncate table event
      alterTableStatsForTruncate(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
          tableName, tbl, partNames, validWriteIds, writeId);
    } catch (Exception e) {
      throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class)
          .convertIfInstance(IOException.class, MetaException.class)
          .defaultMetaException();
    }
  }

  /**
   * Add an empty baseDir with a truncate metadata file.
   * @param location partition or table directory
   * @param writeId allocated writeId
   * @param fs FileSystem
   * @throws Exception
   */
  private void addTruncateBaseFile(Path location, long writeId, FileSystem fs) throws Exception {
    Path basePath = new Path(location, AcidConstants.baseDir(writeId));
    fs.mkdirs(basePath);
    // We cannot leave the folder empty, otherwise it will be skipped at some file listing in AcidUtils.
    // No need for a data file, a simple metadata file is enough.
    AcidMetaDataFile.writeToFile(fs, basePath, AcidMetaDataFile.DataFormat.TRUNCATED);
  }

  private void truncateDataFiles(Path location, FileSystem fs, boolean isSkipTrash, boolean needCmRecycle)
      throws IOException, MetaException, NoSuchObjectException {
    if (!HdfsUtils.isPathEncrypted(getConf(), fs.getUri(), location)
        && !FileUtils.pathHasSnapshotSubDir(location, fs)) {
      HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(getConf(), fs, location);
      FileStatus targetStatus = fs.getFileStatus(location);
      String targetGroup = targetStatus == null ? null : targetStatus.getGroup();
      wh.deleteDir(location, true, isSkipTrash, needCmRecycle);
      fs.mkdirs(location);
      HdfsUtils.setFullFileStatus(getConf(), status, targetGroup, fs, location, false);
    } else {
      FileStatus[] statuses = fs.listStatus(location, FileUtils.HIDDEN_FILES_PATH_FILTER);
      if (statuses == null || statuses.length == 0) {
        return;
      }
      for (final FileStatus status : statuses) {
        wh.deleteDir(status.getPath(), true, isSkipTrash, needCmRecycle);
      }
    }
  }

  /**
   * Is this an external table?
   *
   * @param table
   *          Check if this table is external.
   * @return True if the table is external, otherwise false.
*/ private boolean isExternal(Table table) { return MetaStoreUtils.isExternalTable(table); } private boolean isExternalTablePurge(Table table) { return MetaStoreUtils.isExternalTablePurge(table); } @Override @Deprecated public Table get_table(final String dbname, final String name) throws MetaException, NoSuchObjectException { String[] parsedDbName = parseDbName(dbname, conf); GetTableRequest getTableRequest = new GetTableRequest(parsedDbName[DB_NAME],name); getTableRequest.setCatName(parsedDbName[CAT_NAME]); return getTableInternal(getTableRequest); } @Override public List<ExtendedTableInfo> get_tables_ext(final GetTablesExtRequest req) throws MetaException { List<String> tables = new ArrayList<String>(); List<ExtendedTableInfo> ret = new ArrayList<ExtendedTableInfo>(); String catalog = req.getCatalog(); String database = req.getDatabase(); String pattern = req.getTableNamePattern(); List<String> processorCapabilities = req.getProcessorCapabilities(); int limit = req.getLimit(); String processorId = req.getProcessorIdentifier(); List<Table> tObjects = new ArrayList<>(); startTableFunction("get_tables_ext", catalog, database, pattern); Exception ex = null; try { tables = getMS().getTables(catalog, database, pattern, null, limit); LOG.debug("get_tables_ext:getTables() returned " + tables.size()); tables = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, catalog, database, tables); if (tables.size() > 0) { tObjects = getMS().getTableObjectsByName(catalog, database, tables); LOG.debug("get_tables_ext:getTableObjectsByName() returned " + tObjects.size()); if (processorCapabilities == null || processorCapabilities.size() == 0 || processorCapabilities.contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + processorId); } else { if (transformer != null) { Map<Table, List<String>> retMap = transformer.transform(tObjects, processorCapabilities, processorId); for (Map.Entry<Table, List<String>> entry : retMap.entrySet()) { LOG.debug("Table " + entry.getKey().getTableName() + " requires " + Arrays.toString((entry.getValue()).toArray())); ret.add(convertTableToExtendedTable(entry.getKey(), entry.getValue(), req.getRequestedFields())); } } else { for (Table table : tObjects) { ret.add(convertTableToExtendedTable(table, processorCapabilities, req.getRequestedFields())); } } } } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_tables_ext", ret != null, ex); } return ret; } private ExtendedTableInfo convertTableToExtendedTable (Table table, List<String> processorCapabilities, int mask) { ExtendedTableInfo extTable = new ExtendedTableInfo(table.getTableName()); if ((mask & GetTablesExtRequestFields.ACCESS_TYPE.getValue()) == GetTablesExtRequestFields.ACCESS_TYPE.getValue()) { extTable.setAccessType(table.getAccessType()); } if ((mask & GetTablesExtRequestFields.PROCESSOR_CAPABILITIES.getValue()) == GetTablesExtRequestFields.PROCESSOR_CAPABILITIES.getValue()) { extTable.setRequiredReadCapabilities(table.getRequiredReadCapabilities()); extTable.setRequiredWriteCapabilities(table.getRequiredWriteCapabilities()); } return extTable; } @Override public GetTableResult get_table_req(GetTableRequest req) throws MetaException, NoSuchObjectException { req.setCatName(req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf)); return new GetTableResult(getTableInternal(req)); } /** * This function retrieves table from metastore. 
 If the getColumnStats flag is true,
 * then engine should be specified so the table is retrieved with the column stats
 * for that engine.
 */
  private Table getTableInternal(GetTableRequest getTableRequest) throws MetaException, NoSuchObjectException {
    Preconditions.checkArgument(!getTableRequest.isGetColumnStats() || getTableRequest.getEngine() != null,
        "To retrieve column statistics with a table, engine parameter cannot be null");
    if (isInTest) {
      assertClientHasCapability(getTableRequest.getCapabilities(), ClientCapability.TEST_CAPABILITY,
          "Hive tests", "get_table_req");
    }
    Table t = null;
    startTableFunction("get_table", getTableRequest.getCatName(), getTableRequest.getDbName(),
        getTableRequest.getTblName());
    Exception ex = null;
    try {
      t = get_table_core(getTableRequest);
      if (MetaStoreUtils.isInsertOnlyTableParam(t.getParameters())) {
        assertClientHasCapability(getTableRequest.getCapabilities(),
            ClientCapability.INSERT_ONLY_TABLES, "insert-only tables", "get_table_req");
      }
      if (CollectionUtils.isEmpty(getTableRequest.getProcessorCapabilities())
          || getTableRequest.getProcessorCapabilities().contains("MANAGERAWMETADATA")) {
        LOG.info("Skipping translation for processor with " + getTableRequest.getProcessorIdentifier());
      } else {
        if (transformer != null) {
          List<Table> tList = new ArrayList<>();
          tList.add(t);
          Map<Table, List<String>> ret = transformer.transform(tList,
              getTableRequest.getProcessorCapabilities(), getTableRequest.getProcessorIdentifier());
          if (ret.size() > 1) {
            LOG.warn("Unexpected resultset size:" + ret.size());
            throw new MetaException("Unexpected result from metadata transformer: return list size is " + ret.size());
          }
          t = ret.keySet().iterator().next();
        }
      }
      firePreEvent(new PreReadTableEvent(t, this));
    } catch (MetaException | NoSuchObjectException e) {
      ex = e;
      throw e;
    } finally {
      endFunction("get_table", t != null, ex, getTableRequest.getTblName());
    }
    return t;
  }

  @Override
  public List<TableMeta> get_table_meta(String dbnames, String tblNames, List<String> tblTypes)
      throws MetaException, NoSuchObjectException {
    List<TableMeta> t = null;
    String[] parsedDbName = parseDbName(dbnames, conf);
    startTableFunction("get_table_metas", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblNames);
    Exception ex = null;
    try {
      t = getMS().getTableMeta(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblNames, tblTypes);
      t = FilterUtils.filterTableMetasIfEnabled(isServerFilterEnabled, filterHook,
          parsedDbName[CAT_NAME], parsedDbName[DB_NAME], t);
    } catch (Exception e) {
      ex = e;
      throw newMetaException(e);
    } finally {
      endFunction("get_table_metas", t != null, ex);
    }
    return t;
  }

  @Override
  @Deprecated
  public Table get_table_core(final String catName, final String dbname, final String name)
      throws MetaException, NoSuchObjectException {
    GetTableRequest getTableRequest = new GetTableRequest(dbname, name);
    getTableRequest.setCatName(catName);
    return get_table_core(getTableRequest);
  }

  @Override
  @Deprecated
  public Table get_table_core(final String catName, final String dbname, final String name,
      final String writeIdList) throws MetaException, NoSuchObjectException {
    GetTableRequest getTableRequest = new GetTableRequest(dbname, name);
    getTableRequest.setCatName(catName);
    getTableRequest.setValidWriteIdList(writeIdList);
    return get_table_core(getTableRequest);
  }

  /**
   * This function retrieves a table from the metastore. If the getColumnStats flag is true,
   * then engine should be specified so the table is retrieved with the column stats
   * for that engine.
   */
  @Override
  public Table get_table_core(GetTableRequest getTableRequest) throws MetaException, NoSuchObjectException {
    Preconditions.checkArgument(!getTableRequest.isGetColumnStats() || getTableRequest.getEngine() != null,
        "To retrieve column statistics with a table, engine parameter cannot be null");
    String catName = getTableRequest.getCatName();
    String dbName = getTableRequest.getDbName();
    String tblName = getTableRequest.getTblName();
    Database db = null;
    Table t = null;
    try {
      db = get_database_core(catName, dbName);
    } catch (Exception e) {
      /* it appears no exception is thrown currently if the db doesn't exist */
    }
    if (db != null) {
      if (db.getType().equals(DatabaseType.REMOTE)) {
        t = DataConnectorProviderFactory.getDataConnectorProvider(db).getTable(tblName);
        if (t == null) {
          throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found");
        }
        t.setDbName(dbName);
        return t;
      }
    }
    try {
      t = getMS().getTable(catName, dbName, tblName, getTableRequest.getValidWriteIdList(),
          getTableRequest.getId());
      if (t == null) {
        throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found");
      }
      // If column statistics were requested and are valid, fetch them.
      if (getTableRequest.isGetColumnStats()) {
        ColumnStatistics colStats = getMS().getTableColumnStatistics(catName, dbName, tblName,
            StatsSetupConst.getColumnsHavingStats(t.getParameters()), getTableRequest.getEngine(),
            getTableRequest.getValidWriteIdList());
        if (colStats != null) {
          t.setColStats(colStats);
        }
      }
    } catch (Exception e) {
      throwMetaException(e);
    }
    return t;
  }

  /**
   * Gets multiple tables from the hive metastore.
   *
   * @param dbName
   *          The name of the database in which the tables reside
   * @param tableNames
   *          The names of the tables to get.
   *
   * @return A list of tables whose names are in the list "names" and
   *         are retrievable from the database specified by "dbnames."
   *         There is no guarantee of the order of the returned tables.
   *         If there are duplicate names, only one instance of the table will be returned.
   * @throws MetaException
   * @throws InvalidOperationException
   * @throws UnknownDBException
   */
  @Override
  @Deprecated
  public List<Table> get_table_objects_by_name(final String dbName, final List<String> tableNames)
      throws MetaException, InvalidOperationException, UnknownDBException {
    String[] parsedDbName = parseDbName(dbName, conf);
    return getTableObjectsInternal(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableNames, null, null, null);
  }

  @Override
  public GetTablesResult get_table_objects_by_name_req(GetTablesRequest req) throws TException {
    String catName = req.isSetCatName() ?
req.getCatName() : getDefaultCatalog(conf); if (isDatabaseRemote(req.getDbName())) { return new GetTablesResult(getRemoteTableObjectsInternal(req.getDbName(), req.getTblNames(), req.getTablesPattern())); } return new GetTablesResult(getTableObjectsInternal(catName, req.getDbName(), req.getTblNames(), req.getCapabilities(), req.getProjectionSpec(), req.getTablesPattern())); } private List<Table> filterTablesByName(List<Table> tables, List<String> tableNames) { List<Table> filteredTables = new ArrayList<>(); for (Table table : tables) { if (tableNames.contains(table.getTableName())) { filteredTables.add(table); } } return filteredTables; } private List<Table> getRemoteTableObjectsInternal(String dbname, List<String> tableNames, String pattern) throws MetaException { String[] parsedDbName = parseDbName(dbname, conf); try { // retrieve tables from remote database Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); List<Table> tables = DataConnectorProviderFactory.getDataConnectorProvider(db).getTables(null); // filtered out undesired tables if (tableNames != null) { tables = filterTablesByName(tables, tableNames); } // set remote tables' local hive database reference for (Table table : tables) { table.setDbName(dbname); } return FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, tables); } catch (Exception e) { LOG.warn("Unexpected exception while getting table(s) in remote database " + dbname , e); return new ArrayList<Table>(); } } private List<Table> getTableObjectsInternal(String catName, String dbName, List<String> tableNames, ClientCapabilities capabilities, GetProjectionsSpec projectionsSpec, String tablePattern) throws MetaException, InvalidOperationException, UnknownDBException { if (isInTest) { assertClientHasCapability(capabilities, ClientCapability.TEST_CAPABILITY, "Hive tests", "get_table_objects_by_name_req"); } if (projectionsSpec != null) { if (!projectionsSpec.isSetFieldList() && (projectionsSpec.isSetIncludeParamKeyPattern() || projectionsSpec.isSetExcludeParamKeyPattern())) { throw new InvalidOperationException("Include and Exclude Param key are not supported."); } } List<Table> tables = new ArrayList<>(); startMultiTableFunction("get_multi_table", dbName, tableNames); Exception ex = null; int tableBatchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_MAX); try { if (dbName == null || dbName.isEmpty()) { throw new UnknownDBException("DB name is null or empty"); } RawStore ms = getMS(); if(tablePattern != null){ tables = ms.getTableObjectsByName(catName, dbName, tableNames, projectionsSpec, tablePattern); }else { if (tableNames == null) { throw new InvalidOperationException(dbName + " cannot find null tables"); } // The list of table names could contain duplicates. RawStore.getTableObjectsByName() // only guarantees returning no duplicate table objects in one batch. If we need // to break into multiple batches, remove duplicates first. List<String> distinctTableNames = tableNames; if (distinctTableNames.size() > tableBatchSize) { List<String> lowercaseTableNames = new ArrayList<>(); for (String tableName : tableNames) { lowercaseTableNames.add(org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(tableName)); } distinctTableNames = new ArrayList<>(new HashSet<>(lowercaseTableNames)); } int startIndex = 0; // Retrieve the tables from the metastore in batches. 
Some databases like // Oracle cannot have over 1000 expressions in an in-list
while (startIndex < distinctTableNames.size()) {
  int endIndex = Math.min(startIndex + tableBatchSize, distinctTableNames.size());
  tables.addAll(ms.getTableObjectsByName(catName, dbName,
      distinctTableNames.subList(startIndex, endIndex), projectionsSpec, tablePattern));
  startIndex = endIndex;
}
}
for (Table t : tables) {
  if (t.getParameters() != null && MetaStoreUtils.isInsertOnlyTableParam(t.getParameters())) {
    assertClientHasCapability(capabilities, ClientCapability.INSERT_ONLY_TABLES,
        "insert-only tables", "get_table_req");
  }
}
tables = FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, tables);
} catch (Exception e) {
  ex = e;
  throw handleException(e)
      .throwIfInstance(MetaException.class, InvalidOperationException.class, UnknownDBException.class)
      .defaultMetaException();
} finally {
  endFunction("get_multi_table", tables != null, ex, join(tableNames, ","));
}
return tables;
}

@Override
public Materialization get_materialization_invalidation_info(final CreationMetadata cm, final String validTxnList)
    throws MetaException {
  return getTxnHandler().getMaterializationInvalidationInfo(cm, validTxnList);
}

@Override
public void update_creation_metadata(String catName, final String dbName, final String tableName, CreationMetadata cm)
    throws MetaException {
  getMS().updateCreationMetadata(catName, dbName, tableName, cm);
}

private void assertClientHasCapability(ClientCapabilities client, ClientCapability value,
    String what, String call) throws MetaException {
  if (!doesClientHaveCapability(client, value)) {
    throw new MetaException("Your client does not appear to support " + what + ". To skip"
        + " capability checks, please set " + ConfVars.CAPABILITY_CHECK.toString()
        + " to false. This setting can be set globally, or on the client for the current"
        + " metastore session. Note that this may lead to incorrect results, data loss,"
        + " undefined behavior, etc. if your client is actually incompatible. 
You can also" + " specify custom client capabilities via " + call + " API."); } } private boolean doesClientHaveCapability(ClientCapabilities client, ClientCapability value) { if (!MetastoreConf.getBoolVar(getConf(), ConfVars.CAPABILITY_CHECK)) { return true; } return (client != null && client.isSetValues() && client.getValues().contains(value)); } @Override public List<String> get_table_names_by_filter( final String dbName, final String filter, final short maxTables) throws MetaException, InvalidOperationException, UnknownDBException { List<String> tables = null; startFunction("get_table_names_by_filter", ": db = " + dbName + ", filter = " + filter); Exception ex = null; String[] parsedDbName = parseDbName(dbName, conf); try { if (parsedDbName[CAT_NAME] == null || parsedDbName[CAT_NAME].isEmpty() || parsedDbName[DB_NAME] == null || parsedDbName[DB_NAME].isEmpty()) { throw new UnknownDBException("DB name is null or empty"); } if (filter == null) { throw new InvalidOperationException(filter + " cannot apply null filter"); } tables = getMS().listTableNamesByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], filter, maxTables); tables = FilterUtils.filterTableNamesIfEnabled( isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tables); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidOperationException.class, UnknownDBException.class) .defaultMetaException(); } finally { endFunction("get_table_names_by_filter", tables != null, ex, join(tables, ",")); } return tables; } private Partition append_partition_common(RawStore ms, String catName, String dbName, String tableName, List<String> part_vals, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException, NoSuchObjectException { Partition part = new Partition(); boolean success = false, madeDir = false; Path partLocation = null; Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; try { ms.openTransaction(); part.setCatName(catName); part.setDbName(dbName); part.setTableName(tableName); part.setValues(part_vals); MetaStoreServerUtils.validatePartitionNameCharacters(part_vals, partitionValidationPattern); tbl = ms.getTable(part.getCatName(), part.getDbName(), part.getTableName(), null); if (tbl == null) { throw new InvalidObjectException( "Unable to add partition because table or database do not exist"); } if (tbl.getSd().getLocation() == null) { throw new MetaException( "Cannot append a partition to a view"); } db = get_database_core(catName, dbName); firePreEvent(new PreAddPartitionEvent(tbl, part, this)); part.setSd(tbl.getSd().deepCopy()); partLocation = new Path(tbl.getSd().getLocation(), Warehouse .makePartName(tbl.getPartitionKeys(), part_vals)); part.getSd().setLocation(partLocation.toString()); Partition old_part; try { old_part = ms.getPartition(part.getCatName(), part.getDbName(), part .getTableName(), part.getValues()); } catch (NoSuchObjectException e) { // this means there is no existing partition old_part = null; } if (old_part != null) { throw new AlreadyExistsException("Partition already exists:" + part); } if (!wh.isDir(partLocation)) { if (!wh.mkdirs(partLocation)) { throw new MetaException(partLocation + " is not a directory or unable to create one"); } madeDir = true; } // set create time long time = System.currentTimeMillis() / 1000; part.setCreateTime((int) time); part.putToParameters(hive_metastoreConstants.DDL_TIME, 
Long.toString(time)); if (canUpdateStats(tbl)) { MetaStoreServerUtils.updatePartitionStatsFast(part, tbl, wh, madeDir, false, envContext, true); } if (ms.addPartition(part)) { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, part, true, this), envContext); } success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); if (madeDir) { wh.deleteDir(partLocation, true, false, ReplChangeManager.shouldEnableCm(db, tbl)); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, part, success, this), envContext, transactionalListenerResponses, ms); } } return part; } private void firePreEvent(PreEventContext event) throws MetaException { for (MetaStorePreEventListener listener : preListeners) { try { listener.onEvent(event); } catch (NoSuchObjectException e) { throw new MetaException(e.getMessage()); } catch (InvalidOperationException e) { throw new MetaException(e.getMessage()); } } } @Override public Partition append_partition(final String dbName, final String tableName, final List<String> part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException { return append_partition_with_environment_context(dbName, tableName, part_vals, null); } @Override public Partition append_partition_with_environment_context(final String dbName, final String tableName, final List<String> part_vals, final EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException { if (part_vals == null || part_vals.isEmpty()) { throw new MetaException("The partition values must not be null or empty."); } String[] parsedDbName = parseDbName(dbName, conf); startPartitionFunction("append_partition", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, part_vals); if (LOG.isDebugEnabled()) { for (String part : part_vals) { LOG.debug(part); } } Partition ret = null; Exception ex = null; try { ret = append_partition_common(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, part_vals, envContext); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("append_partition", ret != null, ex, tableName); } return ret; } private static class PartValEqWrapperLite { List<String> values; String location; PartValEqWrapperLite(Partition partition) { this.values = partition.isSetValues()? partition.getValues() : null; if (partition.getSd() != null) { this.location = partition.getSd().getLocation(); } } @Override public int hashCode() { return values == null ? 
0 : values.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || !(obj instanceof PartValEqWrapperLite)) { return false; } List<String> lhsValues = this.values; List<String> rhsValues = ((PartValEqWrapperLite)obj).values; if (lhsValues == null || rhsValues == null) { return lhsValues == rhsValues; } if (lhsValues.size() != rhsValues.size()) { return false; } for (int i=0; i<lhsValues.size(); ++i) { String lhsValue = lhsValues.get(i); String rhsValue = rhsValues.get(i); if ((lhsValue == null && rhsValue != null) || (lhsValue != null && !lhsValue.equals(rhsValue))) { return false; } } return true; } } private List<Partition> add_partitions_core(final RawStore ms, String catName, String dbName, String tblName, List<Partition> parts, final boolean ifNotExists) throws TException { logAndAudit("add_partitions"); boolean success = false; // Ensures that the list doesn't have dups, and keeps track of directories we have created. final Map<PartValEqWrapperLite, Boolean> addedPartitions = new ConcurrentHashMap<>(); final List<Partition> newParts = new ArrayList<>(); final List<Partition> existingParts = new ArrayList<>(); Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; List<ColumnStatistics> partsColStats = new ArrayList<>(parts.size()); List<Long> partsWriteIds = new ArrayList<>(parts.size()); throwUnsupportedExceptionIfRemoteDB(dbName, "add_partitions"); Lock tableLock = getTableLockFor(dbName, tblName); tableLock.lock(); try { ms.openTransaction(); tbl = ms.getTable(catName, dbName, tblName, null); if (tbl == null) { throw new InvalidObjectException("Unable to add partitions because " + TableName.getQualified(catName, dbName, tblName) + " does not exist"); } db = ms.getDatabase(catName, dbName); if (!parts.isEmpty()) { firePreEvent(new PreAddPartitionEvent(tbl, parts, this)); } Set<PartValEqWrapperLite> partsToAdd = new HashSet<>(parts.size()); List<Partition> partitionsToAdd = new ArrayList<>(parts.size()); List<FieldSchema> partitionKeys = tbl.getPartitionKeys(); for (final Partition part : parts) { // Collect partition column stats to be updated if present. Partition objects passed down // here at the time of replication may have statistics in them, which is required to be // updated in the metadata. But we don't want it to be part of the Partition object when // it's being created or altered, lest it becomes part of the notification event. if (part.isSetColStats()) { partsColStats.add(part.getColStats()); part.unsetColStats(); partsWriteIds.add(part.getWriteId()); } // Iterate through the partitions and validate them. If one of the partitions is // incorrect, an exception will be thrown before the threads which create the partition // folders are submitted. This way we can be sure that no partition and no partition // folder will be created if the list contains an invalid partition. if (validatePartition(part, catName, tblName, dbName, partsToAdd, ms, ifNotExists, partitionKeys)) { partitionsToAdd.add(part); } else { existingParts.add(part); } } newParts.addAll(createPartitionFolders(partitionsToAdd, tbl, addedPartitions)); if (!newParts.isEmpty()) { ms.addPartitions(catName, dbName, tblName, newParts); } // Notification is generated for newly created partitions only. The subset of partitions // that already exist (existingParts), will not generate notifications. 
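// Note: the transactional listeners below are invoked while the metastore transaction is
// still open, so whatever they record (e.g. notification-log entries) is committed or
// rolled back together with the partition metadata itself.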
if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, newParts, true, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, newParts, true, this), null, transactionalListenerResponses, ms); if (!existingParts.isEmpty()) { // The request has succeeded but we failed to add these partitions. MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, existingParts, false, this), null, null, ms); } } // Update partition column statistics if available. We need a valid writeId list to // update column statistics for a transactional table. But during bootstrap replication, // where we use this feature, we do not have a valid writeId list which was used to // update the stats. But we know for sure that the writeId associated with the stats was // valid then (otherwise stats update would have failed on the source). So, craft a valid // transaction list with only that writeId and use it to update the stats. int cnt = 0; for (ColumnStatistics partColStats: partsColStats) { long writeId = partsWriteIds.get(cnt++); String validWriteIds = null; if (writeId > 0) { ValidWriteIdList validWriteIdList = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(), tbl.getTableName()), new long[0], new BitSet(), writeId); validWriteIds = validWriteIdList.toString(); } updatePartitonColStatsInternal(tbl, partColStats, validWriteIds, writeId); } success = ms.commitTransaction(); } finally { try { if (!success) { ms.rollbackTransaction(); cleanupPartitionFolders(addedPartitions, db); if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, parts, false, this), null, null, ms); } } } finally { tableLock.unlock(); } } return newParts; } private Lock getTableLockFor(String dbName, String tblName) { return tablelocks.get(dbName + "." + tblName); } /** * Remove the newly created partition folders. The values in the addedPartitions map indicates * whether or not the location of the partition was newly created. If the value is false, the * partition folder will not be removed. * @param addedPartitions * @throws MetaException * @throws IllegalArgumentException */ private void cleanupPartitionFolders(final Map<PartValEqWrapperLite, Boolean> addedPartitions, Database db) throws MetaException, IllegalArgumentException { for (Map.Entry<PartValEqWrapperLite, Boolean> e : addedPartitions.entrySet()) { if (e.getValue()) { // we just created this directory - it's not a case of pre-creation, so we nuke. wh.deleteDir(new Path(e.getKey().location), true, db); } } } /** * Validate a partition before creating it. The validation checks * <ul> * <li>if the database and table names set in the partition are not null and they are matching * with the expected values set in the tblName and dbName parameters.</li> * <li>if the partition values are set.</li> * <li>if none of the partition values is null.</li> * <li>if the partition values are matching with the pattern set in the * 'metastore.partition.name.whitelist.pattern' configuration property.</li> * <li>if the partition doesn't already exist. If the partition already exists, an exception * will be thrown if the ifNotExists parameter is false, otherwise it will be just ignored.</li> * <li>if the partsToAdd set doesn't contain the partition. 
The partsToAdd set contains the * partitions which are already validated. If the set contains the current partition, it means * that the partition is tried to be added multiple times in the same batch. Please note that * the set will be updated with the current partition if the validation was successful.</li> * </ul> * @param part * @param catName * @param tblName * @param dbName * @param partsToAdd * @param ms * @param ifNotExists * @return * @throws MetaException * @throws TException */ private boolean validatePartition(final Partition part, final String catName, final String tblName, final String dbName, final Set<PartValEqWrapperLite> partsToAdd, final RawStore ms, final boolean ifNotExists, List<FieldSchema> partitionKeys) throws MetaException, TException { if (part.getDbName() == null || part.getTableName() == null) { throw new MetaException("The database and table name must be set in the partition."); } if (!part.getTableName().equalsIgnoreCase(tblName) || !part.getDbName().equalsIgnoreCase(dbName)) { String errorMsg = String.format( "Partition does not belong to target table %s. It belongs to the table %s.%s : %s", TableName.getQualified(catName, dbName, tblName), part.getDbName(), part.getTableName(), part.toString()); throw new MetaException(errorMsg); } if (part.getValues() == null || part.getValues().isEmpty()) { throw new MetaException("The partition values cannot be null or empty."); } if (part.getValues().contains(null)) { throw new MetaException("Partition value cannot be null."); } boolean shouldAdd = startAddPartition(ms, part, partitionKeys, ifNotExists); if (!shouldAdd) { LOG.info("Not adding partition {} as it already exists", part); return false; } if (!partsToAdd.add(new PartValEqWrapperLite(part))) { // Technically, for ifNotExists case, we could insert one and discard the other // because the first one now "exists", but it seems better to report the problem // upstream as such a command doesn't make sense. throw new MetaException("Duplicate partitions in the list: " + part); } return true; } /** * Create the location folders for the partitions. For each partition a separate thread will be * started to create the folder. The method will wait until all threads are finished and returns * the partitions whose folders were created successfully. If an error occurs during the * execution of a thread, a MetaException will be thrown. 
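 * The folder-creation tasks run on the shared thread pool and are wrapped in the calling
 * user's UGI, so (assuming impersonation is in effect) the filesystem operations are
 * performed in the caller's security context.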
* @param partitionsToAdd * @param table * @param addedPartitions * @return * @throws MetaException */ private List<Partition> createPartitionFolders(final List<Partition> partitionsToAdd, final Table table, final Map<PartValEqWrapperLite, Boolean> addedPartitions) throws MetaException { final AtomicBoolean failureOccurred = new AtomicBoolean(false); final List<Future<Partition>> partFutures = new ArrayList<>(partitionsToAdd.size()); final Map<PartValEqWrapperLite, Boolean> addedParts = new ConcurrentHashMap<>(); final UserGroupInformation ugi; try { ugi = UserGroupInformation.getCurrentUser(); } catch (IOException e) { throw new RuntimeException(e); } for (final Partition partition : partitionsToAdd) { initializePartitionParameters(table, partition); partFutures.add(threadPool.submit(() -> { if (failureOccurred.get()) { return null; } ugi.doAs((PrivilegedExceptionAction<Partition>) () -> { try { boolean madeDir = createLocationForAddedPartition(table, partition); addedParts.put(new PartValEqWrapperLite(partition), madeDir); initializeAddedPartition(table, partition, madeDir, null); } catch (MetaException e) { throw new IOException(e.getMessage(), e); } return null; }); return partition; })); } List<Partition> newParts = new ArrayList<>(partitionsToAdd.size()); String errorMessage = null; for (Future<Partition> partFuture : partFutures) { try { Partition part = partFuture.get(); if (part != null && !failureOccurred.get()) { newParts.add(part); } } catch (ExecutionException e) { // If an exception is thrown in the execution of a task, set the failureOccurred flag to // true. This flag is visible in the tasks and if its value is true, the partition // folders won't be created. // Then iterate through the remaining tasks and wait for them to finish. The tasks which // are started before the flag got set will then finish creating the partition folders. // The tasks which are started after the flag got set, won't create the partition // folders, to avoid unnecessary work. // This way it is sure that all tasks are finished, when entering the finally part where // the partition folders are cleaned up. It won't happen that a task is still running // when cleaning up the folders, so it is sure we won't have leftover folders. // Canceling the other tasks would be also an option but during testing it turned out // that it is not a trustworthy solution to avoid leftover folders. 
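// The compareAndSet below means only the first observed failure flips the flag;
// errorMessage, by contrast, simply keeps the message of the last failed future, which is
// acceptable since any failure aborts the whole batch.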
failureOccurred.compareAndSet(false, true); errorMessage = e.getMessage(); } catch (InterruptedException e) { failureOccurred.compareAndSet(false, true); errorMessage = e.getMessage(); // Restore interruption status of the corresponding thread Thread.currentThread().interrupt(); } } addedPartitions.putAll(addedParts); if (failureOccurred.get()) { throw new MetaException(errorMessage); } return newParts; } @Override public AddPartitionsResult add_partitions_req(AddPartitionsRequest request) throws TException { AddPartitionsResult result = new AddPartitionsResult(); if (request.getParts().isEmpty()) { return result; } try { if (!request.isSetCatName()) { request.setCatName(getDefaultCatalog(conf)); } // Make sure all of the partitions have the catalog set as well request.getParts().forEach(p -> { if (!p.isSetCatName()) { p.setCatName(getDefaultCatalog(conf)); } }); List<Partition> parts = add_partitions_core(getMS(), request.getCatName(), request.getDbName(), request.getTblName(), request.getParts(), request.isIfNotExists()); if (request.isNeedResult()) { result.setPartitions(parts); } } catch (Exception e) { throw handleException(e).throwIfInstance(TException.class).defaultMetaException(); } return result; } @Override public int add_partitions(final List<Partition> parts) throws MetaException, InvalidObjectException, AlreadyExistsException { startFunction("add_partition"); if (parts == null) { throw new MetaException("Partition list cannot be null."); } if (parts.isEmpty()) { return 0; } Integer ret = null; Exception ex = null; try { // Old API assumed all partitions belong to the same table; keep the same assumption if (!parts.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); for (Partition p : parts) { p.setCatName(defaultCat); } } ret = add_partitions_core(getMS(), parts.get(0).getCatName(), parts.get(0).getDbName(), parts.get(0).getTableName(), parts, false).size(); assert ret == parts.size(); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { String tableName = parts.get(0).getTableName(); endFunction("add_partition", ret != null, ex, tableName); } return ret; } @Override public int add_partitions_pspec(final List<PartitionSpec> partSpecs) throws TException { logAndAudit("add_partitions_pspec"); if (partSpecs.isEmpty()) { return 0; } String dbName = partSpecs.get(0).getDbName(); String tableName = partSpecs.get(0).getTableName(); // If the catalog name isn't set, we need to go through and set it. String catName; if (!partSpecs.get(0).isSetCatName()) { catName = getDefaultCatalog(conf); partSpecs.forEach(ps -> ps.setCatName(catName)); } else { catName = partSpecs.get(0).getCatName(); } return add_partitions_pspec_core(getMS(), catName, dbName, tableName, partSpecs, false); } private int add_partitions_pspec_core(RawStore ms, String catName, String dbName, String tblName, List<PartitionSpec> partSpecs, boolean ifNotExists) throws TException { boolean success = false; if (dbName == null || tblName == null) { throw new MetaException("The database and table name cannot be null."); } // Ensures that the list doesn't have dups, and keeps track of directories we have created. 
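// The Boolean value records whether this call actually created the partition directory:
// cleanupPartitionFolders() only deletes entries mapped to true, so directories that
// existed beforehand are never removed on rollback.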
final Map<PartValEqWrapperLite, Boolean> addedPartitions = new ConcurrentHashMap<>(); PartitionSpecProxy partitionSpecProxy = PartitionSpecProxy.Factory.get(partSpecs); final PartitionSpecProxy.PartitionIterator partitionIterator = partitionSpecProxy .getPartitionIterator(); Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; Lock tableLock = getTableLockFor(dbName, tblName); tableLock.lock(); try { ms.openTransaction(); try { db = ms.getDatabase(catName, dbName); } catch (NoSuchObjectException notExists) { throw new InvalidObjectException("Unable to add partitions because " + "database or table " + dbName + "." + tblName + " does not exist"); } if (db.getType() == DatabaseType.REMOTE) { throw new MetaException("Operation add_partitions_pspec not supported on tables in REMOTE database"); } tbl = ms.getTable(catName, dbName, tblName, null); if (tbl == null) { throw new InvalidObjectException("Unable to add partitions because " + "database or table " + dbName + "." + tblName + " does not exist"); } firePreEvent(new PreAddPartitionEvent(tbl, partitionSpecProxy, this)); Set<PartValEqWrapperLite> partsToAdd = new HashSet<>(partitionSpecProxy.size()); List<Partition> partitionsToAdd = new ArrayList<>(partitionSpecProxy.size()); List<FieldSchema> partitionKeys = tbl.getPartitionKeys(); while (partitionIterator.hasNext()) { // Iterate through the partitions and validate them. If one of the partitions is // incorrect, an exception will be thrown before the threads which create the partition // folders are submitted. This way we can be sure that no partition or partition folder // will be created if the list contains an invalid partition. final Partition part = partitionIterator.getCurrent(); if (validatePartition(part, catName, tblName, dbName, partsToAdd, ms, ifNotExists, partitionKeys)) { partitionsToAdd.add(part); } partitionIterator.next(); } createPartitionFolders(partitionsToAdd, tbl, addedPartitions); ms.addPartitions(catName, dbName, tblName, partitionSpecProxy, ifNotExists); if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, partitionSpecProxy, true, this)); } success = ms.commitTransaction(); return addedPartitions.size(); } finally { try { if (!success) { ms.rollbackTransaction(); cleanupPartitionFolders(addedPartitions, db); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, partitionSpecProxy, true, this), null, transactionalListenerResponses, ms); } } finally { tableLock.unlock(); } } } private boolean startAddPartition( RawStore ms, Partition part, List<FieldSchema> partitionKeys, boolean ifNotExists) throws TException { MetaStoreServerUtils.validatePartitionNameCharacters(part.getValues(), partitionValidationPattern); boolean doesExist = ms.doesPartitionExist(part.getCatName(), part.getDbName(), part.getTableName(), partitionKeys, part.getValues()); if (doesExist && !ifNotExists) { throw new AlreadyExistsException("Partition already exists: " + part); } return !doesExist; } /** * Handles the location for a partition being created. * @param tbl Table. * @param part Partition. * @return Whether the partition SD location is set to a newly created directory. 
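 *         When {@code false} is returned the directory either already existed or the
 *         partition has no location at all, so it must not be deleted on rollback.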
*/ private boolean createLocationForAddedPartition( final Table tbl, final Partition part) throws MetaException { Path partLocation = null; String partLocationStr = null; if (part.getSd() != null) { partLocationStr = part.getSd().getLocation(); } if (partLocationStr == null || partLocationStr.isEmpty()) { // set default location if not specified and this is // a physical table partition (not a view) if (tbl.getSd().getLocation() != null) { partLocation = new Path(tbl.getSd().getLocation(), Warehouse .makePartName(tbl.getPartitionKeys(), part.getValues())); } } else { if (tbl.getSd().getLocation() == null) { throw new MetaException("Cannot specify location for a view partition"); } partLocation = wh.getDnsPath(new Path(partLocationStr)); } boolean result = false; if (partLocation != null) { part.getSd().setLocation(partLocation.toString()); // Check to see if the directory already exists before calling // mkdirs() because if the file system is read-only, mkdirs will // throw an exception even if the directory already exists. if (!wh.isDir(partLocation)) { if (!wh.mkdirs(partLocation)) { throw new MetaException(partLocation + " is not a directory or unable to create one"); } result = true; } } return result; } /** * Verify if update stats while altering partition(s) * For the following three cases HMS will not update partition stats * 1) Table property 'DO_NOT_UPDATE_STATS' = True * 2) HMS configuration property 'STATS_AUTO_GATHER' = False * 3) Is View */ private boolean canUpdateStats(Table tbl) { Map<String,String> tblParams = tbl.getParameters(); boolean updateStatsTbl = true; if ((tblParams != null) && tblParams.containsKey(StatsSetupConst.DO_NOT_UPDATE_STATS)) { updateStatsTbl = !Boolean.valueOf(tblParams.get(StatsSetupConst.DO_NOT_UPDATE_STATS)); } if (!MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) || MetaStoreUtils.isView(tbl) || !updateStatsTbl) { return false; } return true; } private void initializeAddedPartition(final Table tbl, final Partition part, boolean madeDir, EnvironmentContext environmentContext) throws MetaException { initializeAddedPartition(tbl, new PartitionSpecProxy.SimplePartitionWrapperIterator(part), madeDir, environmentContext); } private void initializeAddedPartition( final Table tbl, final PartitionSpecProxy.PartitionIterator part, boolean madeDir, EnvironmentContext environmentContext) throws MetaException { if (canUpdateStats(tbl)) { MetaStoreServerUtils.updatePartitionStatsFast(part, tbl, wh, madeDir, false, environmentContext, true); } // set create time long time = System.currentTimeMillis() / 1000; part.setCreateTime((int) time); if (part.getParameters() == null || part.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) { part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time)); } } private void initializePartitionParameters(final Table tbl, final Partition part) throws MetaException { initializePartitionParameters(tbl, new PartitionSpecProxy.SimplePartitionWrapperIterator(part)); } private void initializePartitionParameters(final Table tbl, final PartitionSpecProxy.PartitionIterator part) throws MetaException { // Inherit table properties into partition properties. Map<String, String> tblParams = tbl.getParameters(); String inheritProps = MetastoreConf.getVar(conf, ConfVars.PART_INHERIT_TBL_PROPS).trim(); // Default value is empty string in which case no properties will be inherited. 
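// For example (illustrative values): setting PART_INHERIT_TBL_PROPS to "owner,retention"
// copies just those two table parameters onto each newly added partition.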
// * implies all properties need to be inherited
Set<String> inheritKeys = new HashSet<>(Arrays.asList(inheritProps.split(",")));
if (inheritKeys.contains("*")) {
  inheritKeys = tblParams.keySet();
}

for (String key : inheritKeys) {
  String paramVal = tblParams.get(key);
  if (null != paramVal) { // add the property only if it exists in table properties
    part.putToParameters(key, paramVal);
  }
}
}

private Partition add_partition_core(final RawStore ms, final Partition part, final EnvironmentContext envContext)
    throws TException {
  boolean success = false;
  Table tbl = null;
  Map<String, String> transactionalListenerResponses = Collections.emptyMap();
  if (!part.isSetCatName()) {
    part.setCatName(getDefaultCatalog(conf));
  }
  try {
    ms.openTransaction();
    tbl = ms.getTable(part.getCatName(), part.getDbName(), part.getTableName(), null);
    if (tbl == null) {
      throw new InvalidObjectException(
          "Unable to add partition because table or database do not exist");
    }
    firePreEvent(new PreAddPartitionEvent(tbl, part, this));
    if (part.getValues() == null || part.getValues().isEmpty()) {
      throw new MetaException("The partition values cannot be null or empty.");
    }
    boolean shouldAdd = startAddPartition(ms, part, tbl.getPartitionKeys(), false);
    assert shouldAdd; // start would throw if it already existed here
    boolean madeDir = createLocationForAddedPartition(tbl, part);
    try {
      initializeAddedPartition(tbl, part, madeDir, envContext);
      initializePartitionParameters(tbl, part);
      success = ms.addPartition(part);
    } finally {
      if (!success && madeDir) {
        wh.deleteDir(new Path(part.getSd().getLocation()), true, false,
            ReplChangeManager.shouldEnableCm(ms.getDatabase(part.getCatName(), part.getDbName()), tbl));
      }
    }
    // Setting success to false to make sure that if the listener fails, rollback happens.
    success = false;
    if (!transactionalListeners.isEmpty()) {
      transactionalListenerResponses =
          MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION,
              new AddPartitionEvent(tbl, Arrays.asList(part), true, this), envContext);
    }
    // we proceed only if we'd actually succeeded anyway, otherwise,
    // we'd have thrown an exception
    success = ms.commitTransaction();
  } finally {
    if (!success) {
      ms.rollbackTransaction();
    }
    if (!listeners.isEmpty()) {
      MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION,
          new AddPartitionEvent(tbl, Arrays.asList(part), success, this), envContext,
          transactionalListenerResponses, ms);
    }
  }
  return part;
}

@Override
public Partition add_partition(final Partition part)
    throws InvalidObjectException, AlreadyExistsException, MetaException {
  return add_partition_with_environment_context(part, null);
}

@Override
public Partition add_partition_with_environment_context(final Partition part, EnvironmentContext envContext)
    throws InvalidObjectException, AlreadyExistsException, MetaException {
  if (part == null) {
    throw new MetaException("Partition cannot be null.");
  }
  startTableFunction("add_partition", part.getCatName(), part.getDbName(), part.getTableName());
  Partition ret = null;
  Exception ex = null;
  try {
    ret = add_partition_core(getMS(), part, envContext);
  } catch (Exception e) {
    ex = e;
    throw handleException(e)
        .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class)
        .defaultMetaException();
  } finally {
    endFunction("add_partition", ret != null, ex, part != null ?
part.getTableName(): null); } return ret; } @Override public Partition exchange_partition(Map<String, String> partitionSpecs, String sourceDbName, String sourceTableName, String destDbName, String destTableName) throws TException { exchange_partitions(partitionSpecs, sourceDbName, sourceTableName, destDbName, destTableName); // Wouldn't it make more sense to return the first element of the list returned by the // previous call? return new Partition(); } @Override public List<Partition> exchange_partitions(Map<String, String> partitionSpecs, String sourceDbName, String sourceTableName, String destDbName, String destTableName) throws TException { String[] parsedDestDbName = parseDbName(destDbName, conf); String[] parsedSourceDbName = parseDbName(sourceDbName, conf); // No need to check catalog for null as parseDbName() will never return null for the catalog. if (partitionSpecs == null || parsedSourceDbName[DB_NAME] == null || sourceTableName == null || parsedDestDbName[DB_NAME] == null || destTableName == null) { throw new MetaException("The DB and table name for the source and destination tables," + " and the partition specs must not be null."); } if (!parsedDestDbName[CAT_NAME].equals(parsedSourceDbName[CAT_NAME])) { throw new MetaException("You cannot move a partition across catalogs"); } boolean success = false; boolean pathCreated = false; RawStore ms = getMS(); ms.openTransaction(); Table destinationTable = ms.getTable( parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName, null); if (destinationTable == null) { throw new MetaException( "The destination table " + TableName.getQualified(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName) + " not found"); } Table sourceTable = ms.getTable( parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName, null); if (sourceTable == null) { throw new MetaException("The source table " + TableName.getQualified(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName) + " not found"); } List<String> partVals = MetaStoreUtils.getPvals(sourceTable.getPartitionKeys(), partitionSpecs); List<String> partValsPresent = new ArrayList<> (); List<FieldSchema> partitionKeysPresent = new ArrayList<> (); int i = 0; for (FieldSchema fs: sourceTable.getPartitionKeys()) { String partVal = partVals.get(i); if (partVal != null && !partVal.equals("")) { partValsPresent.add(partVal); partitionKeysPresent.add(fs); } i++; } // Passed the unparsed DB name here, as get_partitions_ps expects to parse it List<Partition> partitionsToExchange = get_partitions_ps(sourceDbName, sourceTableName, partVals, (short)-1); if (partitionsToExchange == null || partitionsToExchange.isEmpty()) { throw new MetaException("No partition is found with the values " + partitionSpecs + " for the table " + sourceTableName); } boolean sameColumns = MetaStoreUtils.compareFieldColumns( sourceTable.getSd().getCols(), destinationTable.getSd().getCols()); boolean samePartitions = MetaStoreUtils.compareFieldColumns( sourceTable.getPartitionKeys(), destinationTable.getPartitionKeys()); if (!sameColumns || !samePartitions) { throw new MetaException("The tables have different schemas." 
+ " Their partitions cannot be exchanged."); } Path sourcePath = new Path(sourceTable.getSd().getLocation(), Warehouse.makePartName(partitionKeysPresent, partValsPresent)); Path destPath = new Path(destinationTable.getSd().getLocation(), Warehouse.makePartName(partitionKeysPresent, partValsPresent)); List<Partition> destPartitions = new ArrayList<>(); Map<String, String> transactionalListenerResponsesForAddPartition = Collections.emptyMap(); List<Map<String, String>> transactionalListenerResponsesForDropPartition = Lists.newArrayListWithCapacity(partitionsToExchange.size()); // Check if any of the partitions already exists in destTable. List<String> destPartitionNames = ms.listPartitionNames(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName, (short) -1); if (destPartitionNames != null && !destPartitionNames.isEmpty()) { for (Partition partition : partitionsToExchange) { String partToExchangeName = Warehouse.makePartName(destinationTable.getPartitionKeys(), partition.getValues()); if (destPartitionNames.contains(partToExchangeName)) { throw new MetaException("The partition " + partToExchangeName + " already exists in the table " + destTableName); } } } Database srcDb = ms.getDatabase(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME]); Database destDb = ms.getDatabase(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME]); if (!HiveMetaStore.isRenameAllowed(srcDb, destDb)) { throw new MetaException("Exchange partition not allowed for " + TableName.getQualified(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName) + " Dest db : " + destDbName); } try { for (Partition partition: partitionsToExchange) { Partition destPartition = new Partition(partition); destPartition.setDbName(parsedDestDbName[DB_NAME]); destPartition.setTableName(destinationTable.getTableName()); Path destPartitionPath = new Path(destinationTable.getSd().getLocation(), Warehouse.makePartName(destinationTable.getPartitionKeys(), partition.getValues())); destPartition.getSd().setLocation(destPartitionPath.toString()); ms.addPartition(destPartition); destPartitions.add(destPartition); ms.dropPartition(parsedSourceDbName[CAT_NAME], partition.getDbName(), sourceTable.getTableName(), partition.getValues()); } Path destParentPath = destPath.getParent(); if (!wh.isDir(destParentPath)) { if (!wh.mkdirs(destParentPath)) { throw new MetaException("Unable to create path " + destParentPath); } } /* * TODO: Use the hard link feature of hdfs * once https://issues.apache.org/jira/browse/HDFS-3370 is done */ pathCreated = wh.renameDir(sourcePath, destPath, false); // Setting success to false to make sure that if the listener fails, rollback happens. 
success = false; if (!transactionalListeners.isEmpty()) { transactionalListenerResponsesForAddPartition = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(destinationTable, destPartitions, true, this)); for (Partition partition : partitionsToExchange) { DropPartitionEvent dropPartitionEvent = new DropPartitionEvent(sourceTable, partition, true, true, this); transactionalListenerResponsesForDropPartition.add( MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_PARTITION, dropPartitionEvent)); } } success = ms.commitTransaction(); return destPartitions; } finally { if (!success || !pathCreated) { ms.rollbackTransaction(); if (pathCreated) { wh.renameDir(destPath, sourcePath, false); } } if (!listeners.isEmpty()) { AddPartitionEvent addPartitionEvent = new AddPartitionEvent(destinationTable, destPartitions, success, this); MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, addPartitionEvent, null, transactionalListenerResponsesForAddPartition, ms); i = 0; for (Partition partition : partitionsToExchange) { DropPartitionEvent dropPartitionEvent = new DropPartitionEvent(sourceTable, partition, success, true, this); Map<String, String> parameters = (transactionalListenerResponsesForDropPartition.size() > i) ? transactionalListenerResponsesForDropPartition.get(i) : null; MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_PARTITION, dropPartitionEvent, null, parameters, ms); i++; } } } } private boolean drop_partition_common(RawStore ms, String catName, String db_name, String tbl_name, List<String> part_vals, final boolean deleteData, final EnvironmentContext envContext) throws MetaException, NoSuchObjectException, IOException, InvalidObjectException, InvalidInputException { boolean success = false; Path partPath = null; Table tbl = null; Partition part = null; boolean isArchived = false; Path archiveParentDir = null; boolean mustPurge = false; boolean tableDataShouldBeDeleted = false; boolean needsCm = false; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); if (db_name == null) { throw new MetaException("The DB name cannot be null."); } if (tbl_name == null) { throw new MetaException("The table name cannot be null."); } if (part_vals == null) { throw new MetaException("The partition values cannot be null."); } try { ms.openTransaction(); part = ms.getPartition(catName, db_name, tbl_name, part_vals); GetTableRequest request = new GetTableRequest(db_name,tbl_name); request.setCatName(catName); tbl = get_table_core(request); tableDataShouldBeDeleted = checkTableDataShouldBeDeleted(tbl, deleteData); firePreEvent(new PreDropPartitionEvent(tbl, part, deleteData, this)); mustPurge = isMustPurge(envContext, tbl); if (part == null) { throw new NoSuchObjectException("Partition doesn't exist. 
" + part_vals); } isArchived = MetaStoreUtils.isArchived(part); if (isArchived) { archiveParentDir = MetaStoreUtils.getOriginalLocation(part); verifyIsWritablePath(archiveParentDir); } if ((part.getSd() != null) && (part.getSd().getLocation() != null)) { partPath = new Path(part.getSd().getLocation()); verifyIsWritablePath(partPath); } if (!ms.dropPartition(catName, db_name, tbl_name, part_vals)) { throw new MetaException("Unable to drop partition"); } else { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, part, true, deleteData, this), envContext); } needsCm = ReplChangeManager.shouldEnableCm(ms.getDatabase(catName, db_name), tbl); success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } else if (deleteData && ((partPath != null) || (archiveParentDir != null))) { if (tableDataShouldBeDeleted) { if (mustPurge) { LOG.info("dropPartition() will purge " + partPath + " directly, skipping trash."); } else { LOG.info("dropPartition() will move " + partPath + " to trash-directory."); } // Archived partitions have har:/to_har_file as their location. // The original directory was saved in params if (isArchived) { assert (archiveParentDir != null); wh.deleteDir(archiveParentDir, true, mustPurge, needsCm); } else { assert (partPath != null); wh.deleteDir(partPath, true, mustPurge, needsCm); deleteParentRecursive(partPath.getParent(), part_vals.size() - 1, mustPurge, needsCm); } // ok even if the data is not deleted } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, part, success, deleteData, this), envContext, transactionalListenerResponses, ms); } } return true; } private static boolean isMustPurge(EnvironmentContext envContext, Table tbl) { // Data needs deletion. Check if trash may be skipped. // Trash may be skipped iff: // 1. deleteData == true, obviously. // 2. tbl is external. // 3. Either // 3.1. User has specified PURGE from the commandline, and if not, // 3.2. User has set the table to auto-purge. 
return ((envContext != null) && Boolean.parseBoolean(envContext.getProperties().get("ifPurge")))
    || MetaStoreUtils.isSkipTrash(tbl.getParameters());
}

private void throwUnsupportedExceptionIfRemoteDB(String dbName, String operationName) throws MetaException {
  if (isDatabaseRemote(dbName)) {
    throw new MetaException("Operation " + operationName + " not supported for REMOTE database " + dbName);
  }
}

private boolean isDatabaseRemote(String name) {
  try {
    String[] parsedDbName = parseDbName(name, conf);
    Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]);
    if (db != null && db.getType() != null && db.getType() == DatabaseType.REMOTE) {
      return true;
    }
  } catch (Exception e) {
    return false;
  }
  return false;
}

private void deleteParentRecursive(Path parent, int depth, boolean mustPurge, boolean needRecycle)
    throws IOException, MetaException {
  if (depth > 0 && parent != null && wh.isWritable(parent) && wh.isEmptyDir(parent)) {
    wh.deleteDir(parent, true, mustPurge, needRecycle);
    deleteParentRecursive(parent.getParent(), depth - 1, mustPurge, needRecycle);
  }
}

@Override
public boolean drop_partition(final String db_name, final String tbl_name,
    final List<String> part_vals, final boolean deleteData) throws TException {
  return drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, null);
}

/** Stores a path and its depth, i.e. how many partition-directory levels lie below the table root. */
private static class PathAndDepth implements Comparable<PathAndDepth> {
  final Path path;
  final int depth;

  public PathAndDepth(Path path, int depth) {
    this.path = path;
    this.depth = depth;
  }

  @Override
  public int hashCode() {
    return Objects.hash(path.hashCode(), depth);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    PathAndDepth that = (PathAndDepth) o;
    return depth == that.depth && Objects.equals(path, that.path);
  }

  /** The largest {@code depth} is processed first in a {@link PriorityQueue}. */
  @Override
  public int compareTo(PathAndDepth o) {
    return o.depth - depth;
  }
}

@Override
public DropPartitionsResult drop_partitions_req(DropPartitionsRequest request) throws TException {
  RawStore ms = getMS();
  String dbName = request.getDbName(), tblName = request.getTblName();
  String catName = request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf);
  boolean ifExists = request.isSetIfExists() && request.isIfExists();
  boolean deleteData = request.isSetDeleteData() && request.isDeleteData();
  boolean ignoreProtection = request.isSetIgnoreProtection() && request.isIgnoreProtection();
  boolean needResult = !request.isSetNeedResult() || request.isNeedResult();
  List<PathAndDepth> dirsToDelete = new ArrayList<>();
  List<Path> archToDelete = new ArrayList<>();
  EnvironmentContext envContext = request.isSetEnvironmentContext() ? request.getEnvironmentContext() : null;
  boolean success = false;
  ms.openTransaction();
  Table tbl = null;
  List<Partition> parts = null;
  boolean mustPurge = false;
  Map<String, String> transactionalListenerResponses = null;
  boolean needsCm = ReplChangeManager.shouldEnableCm(ms.getDatabase(catName, dbName),
      ms.getTable(catName, dbName, tblName));
  try {
    // We need Partition-s for firing events and for result; DN needs MPartition-s to drop.
    // Great... Maybe we could bypass fetching MPartitions by issuing direct SQL deletes.
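// Everything below runs inside the transaction opened above; the finally block either
// rolls it back, or, after a successful commit, performs the actual directory deletions
// outside of it.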
tbl = get_table_core(catName, dbName, tblName); mustPurge = isMustPurge(envContext, tbl); int minCount = 0; RequestPartsSpec spec = request.getParts(); List<String> partNames = null; if (spec.isSetExprs()) { // Dropping by expressions. parts = new ArrayList<>(spec.getExprs().size()); for (DropPartitionsExpr expr : spec.getExprs()) { ++minCount; // At least one partition per expression, if not ifExists List<Partition> result = new ArrayList<>(); boolean hasUnknown = ms.getPartitionsByExpr( catName, dbName, tblName, expr.getExpr(), null, (short)-1, result); if (hasUnknown) { // Expr is built by DDLSA, it should only contain part cols and simple ops throw new MetaException("Unexpected unknown partitions to drop"); } // this is to prevent dropping archived partition which is archived in a // different level the drop command specified. if (!ignoreProtection && expr.isSetPartArchiveLevel()) { for (Partition part : parts) { if (MetaStoreUtils.isArchived(part) && MetaStoreUtils.getArchivingLevel(part) < expr.getPartArchiveLevel()) { throw new MetaException("Cannot drop a subset of partitions " + " in an archive, partition " + part); } } } parts.addAll(result); } } else if (spec.isSetNames()) { partNames = spec.getNames(); minCount = partNames.size(); parts = ms.getPartitionsByNames(catName, dbName, tblName, partNames); } else { throw new MetaException("Partition spec is not set"); } if ((parts.size() < minCount) && !ifExists) { throw new NoSuchObjectException("Some partitions to drop are missing"); } List<String> colNames = null; if (partNames == null) { partNames = new ArrayList<>(parts.size()); colNames = new ArrayList<>(tbl.getPartitionKeys().size()); for (FieldSchema col : tbl.getPartitionKeys()) { colNames.add(col.getName()); } } for (Partition part : parts) { // TODO - we need to speed this up for the normal path where all partitions are under // the table and we don't have to stat every partition firePreEvent(new PreDropPartitionEvent(tbl, part, deleteData, this)); if (colNames != null) { partNames.add(FileUtils.makePartName(colNames, part.getValues())); } // Preserve the old behavior of failing when we cannot write, even w/o deleteData, // and even if the table is external. That might not make any sense. if (MetaStoreUtils.isArchived(part)) { Path archiveParentDir = MetaStoreUtils.getOriginalLocation(part); verifyIsWritablePath(archiveParentDir); archToDelete.add(archiveParentDir); } if ((part.getSd() != null) && (part.getSd().getLocation() != null)) { Path partPath = new Path(part.getSd().getLocation()); verifyIsWritablePath(partPath); dirsToDelete.add(new PathAndDepth(partPath, part.getValues().size())); } } ms.dropPartitions(catName, dbName, tblName, partNames); if (parts != null && !parts.isEmpty() && !transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier .notifyEvent(transactionalListeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, parts, true, deleteData, this), envContext); } success = ms.commitTransaction(); DropPartitionsResult result = new DropPartitionsResult(); if (needResult) { result.setPartitions(parts); } return result; } finally { if (!success) { ms.rollbackTransaction(); } else if (checkTableDataShouldBeDeleted(tbl, deleteData)) { LOG.info( mustPurge? "dropPartition() will purge partition-directories directly, skipping trash." : "dropPartition() will move partition-directories to trash-directory."); // Archived partitions have har:/to_har_file as their location. 
// The original directory was saved in params for (Path path : archToDelete) { wh.deleteDir(path, true, mustPurge, needsCm); } // Uses a priority queue to delete the parents of deleted directories if empty. // Parents with the deepest path are always processed first. It guarantees that the emptiness // of a parent won't be changed once it has been processed. So duplicated processing can be // avoided. PriorityQueue<PathAndDepth> parentsToDelete = new PriorityQueue<>(); for (PathAndDepth p : dirsToDelete) { wh.deleteDir(p.path, true, mustPurge, needsCm); addParentForDel(parentsToDelete, p); } HashSet<PathAndDepth> processed = new HashSet<>(); while (!parentsToDelete.isEmpty()) { try { PathAndDepth p = parentsToDelete.poll(); if (processed.contains(p)) { continue; } processed.add(p); Path path = p.path; if (wh.isWritable(path) && wh.isEmptyDir(path)) { wh.deleteDir(path, true, mustPurge, needsCm); addParentForDel(parentsToDelete, p); } } catch (IOException ex) { LOG.warn("Error from recursive parent deletion", ex); throw new MetaException("Failed to delete parent: " + ex.getMessage()); } } } if (parts != null) { if (parts != null && !parts.isEmpty() && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, parts, success, deleteData, this), envContext, transactionalListenerResponses, ms); } } } } private static void addParentForDel(PriorityQueue<PathAndDepth> parentsToDelete, PathAndDepth p) { Path parent = p.path.getParent(); if (parent != null && p.depth - 1 > 0) { parentsToDelete.add(new PathAndDepth(parent, p.depth - 1)); } } private void verifyIsWritablePath(Path dir) throws MetaException { try { if (!wh.isWritable(dir.getParent())) { throw new MetaException("Table partition not deleted since " + dir.getParent() + " is not writable by " + SecurityUtils.getUser()); } } catch (IOException ex) { LOG.warn("Error from isWritable", ex); throw new MetaException("Table partition not deleted since " + dir.getParent() + " access cannot be checked: " + ex.getMessage()); } } @Override public boolean drop_partition_with_environment_context(final String db_name, final String tbl_name, final List<String> part_vals, final boolean deleteData, final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("drop_partition", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); LOG.info("Partition values:" + part_vals); boolean ret = false; Exception ex = null; try { ret = drop_partition_common(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, deleteData, envContext); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(IOException.class, MetaException.class) .rethrowException(e); } finally { endFunction("drop_partition", ret, ex, tbl_name); } return ret; } /** * Use {@link #get_partition_req(GetPartitionRequest)} ()} instead. 
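 * The request form also lets callers pass the catalog name explicitly rather than
 * encoding it into the database-name string.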
* */ @Override @Deprecated public Partition get_partition(final String db_name, final String tbl_name, final List<String> part_vals) throws MetaException, NoSuchObjectException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partition", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); Partition ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().getPartition(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); ret = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class).defaultMetaException(); } finally { endFunction("get_partition", ret != null, ex, tbl_name); } return ret; } @Override public GetPartitionResponse get_partition_req(GetPartitionRequest req) throws MetaException, NoSuchObjectException, TException { // TODO Move the logic from get_partition to here, as that method is getting deprecated String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); Partition p = get_partition(dbName, req.getTblName(), req.getPartVals()); GetPartitionResponse res = new GetPartitionResponse(); res.setPartition(p); return res; } /** * Fire a pre-event for read table operation, if there are any * pre-event listeners registered */ private void fireReadTablePreEvent(String catName, String dbName, String tblName) throws MetaException, NoSuchObjectException { if(preListeners.size() > 0) { Supplier<Table> tableSupplier = Suppliers.memoize(new Supplier<Table>() { @Override public Table get() { try { Table t = getMS().getTable(catName, dbName, tblName, null); if (t == null) { throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found"); } return t; } catch(MetaException | NoSuchObjectException e) { throw new RuntimeException(e); } } }); firePreEvent(new PreReadTableEvent(tableSupplier, this)); } } @Override @Deprecated public Partition get_partition_with_auth(final String db_name, final String tbl_name, final List<String> part_vals, final String user_name, final List<String> group_names) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partition_with_auth", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); Partition ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().getPartitionWithAuth(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, user_name, group_names); ret = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(InvalidObjectException.class, NoSuchObjectException.class) .rethrowException(e); } finally { endFunction("get_partition_with_auth", ret != null, ex, tbl_name); } return ret; } /** * Use {@link #get_partitions_req(PartitionsRequest)} ()} instead. 
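 * Note that the request path funnels through this method, so both forms are subject to
 * the partition-limit guard in checkLimitNumberOfPartitionsByFilter().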
* */ @Override @Deprecated public List<Partition> get_partitions(final String db_name, final String tbl_name, final short max_parts) throws NoSuchObjectException, MetaException { String[] parsedDbName = parseDbName(db_name, conf); startTableFunction("get_partitions", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<Partition> ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, NO_FILTER_STRING, max_parts); authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().getPartitions(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, max_parts); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_partitions", ret != null, ex, tbl_name); } return ret; } @Override public PartitionsResponse get_partitions_req(PartitionsRequest req) throws NoSuchObjectException, MetaException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<Partition> partitions = get_partitions(dbName, req.getTblName(), req.getMaxParts()); PartitionsResponse res = new PartitionsResponse(); res.setPartitions(partitions); return res; } @Override @Deprecated public List<Partition> get_partitions_with_auth(final String dbName, final String tblName, final short maxParts, final String userName, final List<String> groupNames) throws TException { String[] parsedDbName = parseDbName(dbName, conf); startTableFunction("get_partitions_with_auth", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); List<Partition> ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, NO_FILTER_STRING, maxParts); authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); ret = getMS().getPartitionsWithAuth(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, maxParts, userName, groupNames); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(InvalidObjectException.class, NoSuchObjectException.class) .rethrowException(e); } finally { endFunction("get_partitions_with_auth", ret != null, ex, tblName); } return ret; } private void checkLimitNumberOfPartitionsByFilter(String catName, String dbName, String tblName, String filterString, int maxParts) throws TException { if (isPartitionLimitEnabled()) { checkLimitNumberOfPartitions(tblName, get_num_partitions_by_filter(prependCatalogToDbName( catName, dbName, conf), tblName, filterString), maxParts); } } private void checkLimitNumberOfPartitionsByExpr(String catName, String dbName, String tblName, byte[] filterExpr, int maxParts) throws TException { if (isPartitionLimitEnabled()) { checkLimitNumberOfPartitions(tblName, get_num_partitions_by_expr(catName, dbName, tblName, filterExpr), maxParts); } } private boolean isPartitionLimitEnabled() { int partitionLimit = MetastoreConf.getIntVar(conf, ConfVars.LIMIT_PARTITION_REQUEST); return partitionLimit > -1; } private void checkLimitNumberOfPartitions(String tblName, int numPartitions, int maxToFetch) throws MetaException { if (isPartitionLimitEnabled()) { int partitionLimit = MetastoreConf.getIntVar(conf, ConfVars.LIMIT_PARTITION_REQUEST); int 
partitionRequest = (maxToFetch < 0) ? numPartitions : maxToFetch; if (partitionRequest > partitionLimit) { String configName = ConfVars.LIMIT_PARTITION_REQUEST.toString(); throw new MetaException(String.format(PARTITION_NUMBER_EXCEED_LIMIT_MSG, partitionRequest, tblName, partitionLimit, configName)); } } }
@Override @Deprecated public List<PartitionSpec> get_partitions_pspec(final String db_name, final String tbl_name, final int max_parts) throws NoSuchObjectException, MetaException { String[] parsedDbName = parseDbName(db_name, conf); String tableName = tbl_name.toLowerCase(); startTableFunction("get_partitions_pspec", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); List<PartitionSpec> partitionSpecs = null; try { Table table = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName);
// get_partitions will parse out the catalog and db names itself
List<Partition> partitions = get_partitions(db_name, tableName, (short) max_parts); if (is_partition_spec_grouping_enabled(table)) { partitionSpecs = MetaStoreServerUtils .getPartitionspecsGroupedByStorageDescriptor(table, partitions); } else { PartitionSpec pSpec = new PartitionSpec(); pSpec.setPartitionList(new PartitionListComposingSpec(partitions)); pSpec.setCatName(parsedDbName[CAT_NAME]); pSpec.setDbName(parsedDbName[DB_NAME]); pSpec.setTableName(tableName); pSpec.setRootPath(table.getSd().getLocation()); partitionSpecs = Arrays.asList(pSpec); } return partitionSpecs; } finally { endFunction("get_partitions_pspec", partitionSpecs != null && !partitionSpecs.isEmpty(), null, tbl_name); } }
@Override public GetPartitionsResponse get_partitions_with_specs(GetPartitionsRequest request) throws MetaException, TException { String catName = null; if (request.isSetCatName()) { catName = request.getCatName(); } String[] parsedDbName = parseDbName(request.getDbName(), conf); String tableName = request.getTblName(); if (catName == null) {
// if catName is not provided in the request use the catName parsed from the dbName
catName = parsedDbName[CAT_NAME]; } startTableFunction("get_partitions_with_specs", catName, parsedDbName[DB_NAME], tableName); GetPartitionsResponse response = null; Exception ex = null; try { Table table = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); List<Partition> partitions = getMS() .getPartitionSpecsByFilterAndProjection(table, request.getProjectionSpec(), request.getFilterSpec()); List<String> processorCapabilities = request.getProcessorCapabilities(); String processorId = request.getProcessorIdentifier(); if (processorCapabilities == null || processorCapabilities.size() == 0 || processorCapabilities.contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + processorId); } else { if (transformer != null) { partitions = transformer.transformPartitions(partitions, table, processorCapabilities, processorId); } } List<PartitionSpec> partitionSpecs = MetaStoreServerUtils.getPartitionspecsGroupedByStorageDescriptor(table, partitions); response = new GetPartitionsResponse(); response.setPartitionSpec(partitionSpecs); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_with_specs", response != null, ex, tableName); } return response; }
private static boolean is_partition_spec_grouping_enabled(Table table) { Map<String, String> parameters = table.getParameters(); return parameters.containsKey("hive.hcatalog.partition.spec.grouping.enabled") &&
parameters.get("hive.hcatalog.partition.spec.grouping.enabled").equalsIgnoreCase("true"); }
@Override @Deprecated public List<String> get_partition_names(final String db_name, final String tbl_name, final short max_parts) throws NoSuchObjectException, MetaException { String[] parsedDbName = parseDbName(db_name, conf); startTableFunction("get_partition_names", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<String> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().listPartitionNames(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, max_parts); ret = FilterUtils.filterPartitionNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_partition_names", ret != null, ex, tbl_name); } return ret; }
@Override public PartitionValuesResponse get_partition_values(PartitionValuesRequest request) throws MetaException { String catName = request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf); String dbName = request.getDbName(); String tblName = request.getTblName(); try { authorizeTableForPartitionMetadata(catName, dbName, tblName);
// This is serious black magic, as the following 2 lines do nothing AFAICT but without them
// the subsequent call to listPartitionValues fails.
List<FieldSchema> partCols = new ArrayList<FieldSchema>(); partCols.add(request.getPartitionKeys().get(0)); return getMS().listPartitionValues(catName, dbName, tblName, request.getPartitionKeys(), request.isApplyDistinct(), request.getFilter(), request.isAscending(), request.getPartitionOrder(), request.getMaxParts()); } catch (NoSuchObjectException e) { LOG.error(String.format("Unable to get partition for %s.%s.%s", catName, dbName, tblName), e); throw new MetaException(e.getMessage()); } }
@Deprecated @Override public void alter_partition(final String db_name, final String tbl_name, final Partition new_part) throws TException { rename_partition(db_name, tbl_name, null, new_part); }
@Deprecated @Override public void alter_partition_with_environment_context(final String dbName, final String tableName, final Partition newPartition, final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(dbName, conf);
// TODO: this method name is confusing, it actually does a full alter (sort of)
rename_partition(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, null, newPartition, envContext, null); }
@Deprecated @Override public void rename_partition(final String db_name, final String tbl_name, final List<String> part_vals, final Partition new_part) throws TException { // Call rename_partition without an environment context.
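// Illustrative note (not from the original source): part_vals identifies the partition being
// renamed, e.g. ["2023-01-01", "US"] for partition keys (ds, country), while new_part carries
// the new values and location. A null or empty part_vals means an in-place alter of new_part
// rather than a rename; see the part_vals checks in the private rename_partition below.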
String[] parsedDbName = parseDbName(db_name, conf); rename_partition(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, new_part, null, null); }
@Override public RenamePartitionResponse rename_partition_req( RenamePartitionRequest req) throws InvalidOperationException, MetaException, TException { rename_partition(req.getCatName(), req.getDbName(), req.getTableName(), req.getPartVals(), req.getNewPart(), null, req.getValidWriteIdList()); return new RenamePartitionResponse(); }
private void rename_partition(String catName, String db_name, String tbl_name, List<String> part_vals, Partition new_part, EnvironmentContext envContext, String validWriteIds) throws TException { startTableFunction("alter_partition", catName, db_name, tbl_name); if (LOG.isInfoEnabled()) { LOG.info("New partition values: " + new_part.getValues()); if (part_vals != null && part_vals.size() > 0) { LOG.info("Old partition values: " + part_vals); } }
// Adds the missing scheme/authority for the new partition location
if (new_part.getSd() != null) { String newLocation = new_part.getSd().getLocation(); if (org.apache.commons.lang3.StringUtils.isNotEmpty(newLocation)) { Path tblPath = wh.getDnsPath(new Path(newLocation)); new_part.getSd().setLocation(tblPath.toString()); } }
// Make sure the new partition has the catalog value set
if (!new_part.isSetCatName()) { new_part.setCatName(catName); } Partition oldPart = null; Exception ex = null; try { Table table = getMS().getTable(catName, db_name, tbl_name, null); firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, part_vals, new_part, this)); if (part_vals != null && !part_vals.isEmpty()) { MetaStoreServerUtils.validatePartitionNameCharacters(new_part.getValues(), partitionValidationPattern); } oldPart = alterHandler.alterPartition(getMS(), wh, catName, db_name, tbl_name, part_vals, new_part, envContext, this, validWriteIds);
// Only fetch the table if we actually have a listener
if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(oldPart, new_part, table, false, true, new_part.getWriteId(), this), envContext); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidOperationException.class) .convertIfInstance(InvalidObjectException.class, InvalidOperationException.class) .convertIfInstance(AlreadyExistsException.class, InvalidOperationException.class) .defaultMetaException(); } finally { endFunction("alter_partition", oldPart != null, ex, tbl_name); } }
@Override public void alter_partitions(final String db_name, final String tbl_name, final List<Partition> new_parts) throws TException { String[] o = parseDbName(db_name, conf); alter_partitions_with_environment_context(o[0], o[1], tbl_name, new_parts, null, null, -1); }
@Override public AlterPartitionsResponse alter_partitions_req(AlterPartitionsRequest req) throws TException { alter_partitions_with_environment_context(req.getCatName(), req.getDbName(), req.getTableName(), req.getPartitions(), req.getEnvironmentContext(), req.isSetValidWriteIdList() ? req.getValidWriteIdList() : null, req.isSetWriteId() ? req.getWriteId() : -1); return new AlterPartitionsResponse(); }
// The old API we are keeping for backward compat. Not used within Hive.
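// For illustration only (hypothetical values): modern clients go through the request object
// rather than this overload, roughly:
//   AlterPartitionsRequest req = new AlterPartitionsRequest();
//   req.setDbName("db1"); req.setTableName("t1"); req.setPartitions(parts);
//   handler.alter_partitions_req(req); // catalog, validWriteIdList and writeId default when unset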
@Deprecated @Override public void alter_partitions_with_environment_context(final String db_name, final String tbl_name, final List<Partition> new_parts, EnvironmentContext environmentContext) throws TException { String[] o = parseDbName(db_name, conf); alter_partitions_with_environment_context(o[0], o[1], tbl_name, new_parts, environmentContext, null, -1); }
private void alter_partitions_with_environment_context(String catName, String db_name, final String tbl_name, final List<Partition> new_parts, EnvironmentContext environmentContext, String writeIdList, long writeId) throws TException { if (environmentContext == null) { environmentContext = new EnvironmentContext(); } if (catName == null) { catName = MetaStoreUtils.getDefaultCatalog(conf); } startTableFunction("alter_partitions", catName, db_name, tbl_name); if (LOG.isInfoEnabled()) { for (Partition tmpPart : new_parts) { LOG.info("New partition values: " + tmpPart.getValues()); } }
// all partitions are altered atomically
// all prehooks are fired together followed by all post hooks
List<Partition> oldParts = null; Exception ex = null; Lock tableLock = getTableLockFor(db_name, tbl_name); tableLock.lock(); try { Table table = getMS().getTable(catName, db_name, tbl_name, null); for (Partition tmpPart : new_parts) {
// Make sure the catalog name is set in the new partition
if (!tmpPart.isSetCatName()) { tmpPart.setCatName(getDefaultCatalog(conf)); } firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, null, tmpPart, this)); } oldParts = alterHandler.alterPartitions(getMS(), wh, catName, db_name, tbl_name, new_parts, environmentContext, writeIdList, writeId, this); Iterator<Partition> olditr = oldParts.iterator(); for (Partition tmpPart : new_parts) { Partition oldTmpPart; if (olditr.hasNext()) { oldTmpPart = olditr.next(); } else { throw new InvalidOperationException("failed to alter partitions"); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(oldTmpPart, tmpPart, table, false, true, writeId, this)); } } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidOperationException.class) .convertIfInstance(InvalidObjectException.class, InvalidOperationException.class) .convertIfInstance(AlreadyExistsException.class, InvalidOperationException.class) .defaultMetaException(); } finally { tableLock.unlock(); endFunction("alter_partitions", oldParts != null, ex, tbl_name); } }
@Override public String getVersion() throws TException { String version = MetastoreVersionInfo.getVersion(); endFunction(startFunction("getVersion"), version != null, null); return version; }
@Override public void alter_table(final String dbname, final String name, final Table newTable) throws InvalidOperationException, MetaException { // Do not set an environment context.
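// Sketch (values illustrative, not from the original source): the cascade variant below is
// the path that propagates column changes down to partitions, by setting the CASCADE flag:
//   EnvironmentContext ctx = new EnvironmentContext();
//   ctx.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);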
String[] parsedDbName = parseDbName(dbname, conf); alter_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name, newTable, null, null, null, null); }
@Override public void alter_table_with_cascade(final String dbname, final String name, final Table newTable, final boolean cascade) throws InvalidOperationException, MetaException { EnvironmentContext envContext = null; if (cascade) { envContext = new EnvironmentContext(); envContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE); } String[] parsedDbName = parseDbName(dbname, conf); alter_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name, newTable, envContext, null, null, null); }
@Override public AlterTableResponse alter_table_req(AlterTableRequest req) throws InvalidOperationException, MetaException, TException { alter_table_core(req.getCatName(), req.getDbName(), req.getTableName(), req.getTable(), req.getEnvironmentContext(), req.getValidWriteIdList(), req.getProcessorCapabilities(), req.getProcessorIdentifier()); return new AlterTableResponse(); }
@Override public void alter_table_with_environment_context(final String dbname, final String name, final Table newTable, final EnvironmentContext envContext) throws InvalidOperationException, MetaException { String[] parsedDbName = parseDbName(dbname, conf); alter_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name, newTable, envContext, null, null, null); }
private void alter_table_core(String catName, String dbname, String name, Table newTable, EnvironmentContext envContext, String validWriteIdList, List<String> processorCapabilities, String processorId) throws InvalidOperationException, MetaException { startFunction("alter_table", ": " + TableName.getQualified(catName, dbname, name) + " newtbl=" + newTable.getTableName()); if (envContext == null) { envContext = new EnvironmentContext(); } if (catName == null) { catName = MetaStoreUtils.getDefaultCatalog(conf); }
// HIVE-25282: Drop/Alter table in REMOTE db should fail
try { Database db = get_database_core(catName, dbname); if (db != null && db.getType().equals(DatabaseType.REMOTE)) { throw new MetaException("Alter table in REMOTE database " + db.getName() + " is not allowed"); } } catch (NoSuchObjectException e) { throw new InvalidOperationException("Alter table in REMOTE database is not allowed"); }
// Update the time if it hasn't been specified.
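// DDL_TIME is stored as seconds since the epoch, e.g. "1700000000"; the division by 1000
// below converts the millisecond value from System.currentTimeMillis() accordingly.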
if (newTable.getParameters() == null || newTable.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) { newTable.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(System.currentTimeMillis() / 1000)); }
// Adds the missing scheme/authority for the new table location
if (newTable.getSd() != null) { String newLocation = newTable.getSd().getLocation(); if (org.apache.commons.lang3.StringUtils.isNotEmpty(newLocation)) { Path tblPath = wh.getDnsPath(new Path(newLocation)); newTable.getSd().setLocation(tblPath.toString()); } }
// Set the catalog name if it hasn't been set in the new table
if (!newTable.isSetCatName()) { newTable.setCatName(catName); } boolean success = false; Exception ex = null; try { GetTableRequest request = new GetTableRequest(dbname, name); request.setCatName(catName); Table oldt = get_table_core(request); if (transformer != null) { newTable = transformer.transformAlterTable(oldt, newTable, processorCapabilities, processorId); } firePreEvent(new PreAlterTableEvent(oldt, newTable, this)); alterHandler.alterTable(getMS(), wh, catName, dbname, name, newTable, envContext, this, validWriteIdList); success = true; } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidOperationException.class) .convertIfInstance(NoSuchObjectException.class, InvalidOperationException.class) .defaultMetaException(); } finally { endFunction("alter_table", success, ex, name); } }
@Override public List<String> get_tables(final String dbname, final String pattern) throws MetaException { startFunction("get_tables", ": db=" + dbname + " pat=" + pattern); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { if (isDatabaseRemote(dbname)) { Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); return DataConnectorProviderFactory.getDataConnectorProvider(db).getTableNames(); } } catch (Exception e) { /* appears we return empty set instead of throwing an exception */ }
try { ret = getMS().getTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern); if (ret != null && !ret.isEmpty()) { List<Table> tableInfo = getMS().getTableObjectsByName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); tableInfo = FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, tableInfo); // tableInfo object has the owner information of the table which is being passed to FilterUtils.
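// The name list is rebuilt from the filtered Table objects so that tables the caller is not
// authorized to see (dropped by FilterUtils above) do not leak back into the result.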
ret = new ArrayList<>(); for (Table tbl : tableInfo) { ret.add(tbl.getTableName()); } } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_tables", ret != null, ex); } return ret; } @Override public List<String> get_tables_by_type(final String dbname, final String pattern, final String tableType) throws MetaException { startFunction("get_tables_by_type", ": db=" + dbname + " pat=" + pattern + ",type=" + tableType); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { ret = getTablesByTypeCore(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern, tableType); ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_tables_by_type", ret != null, ex); } return ret; } private List<String> getTablesByTypeCore(final String catName, final String dbname, final String pattern, final String tableType) throws MetaException { startFunction("getTablesByTypeCore", ": catName=" + catName + ": db=" + dbname + " pat=" + pattern + ",type=" + tableType); List<String> ret = null; Exception ex = null; Database db = null; try { db = get_database_core(catName, dbname); if (db != null) { if (db.getType().equals(DatabaseType.REMOTE)) { return DataConnectorProviderFactory.getDataConnectorProvider(db).getTableNames(); } } } catch (Exception e) { /* ignore */ } try { ret = getMS().getTables(catName, dbname, pattern, TableType.valueOf(tableType), -1); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("getTablesByTypeCore", ret != null, ex); } return ret; } @Override public List<Table> get_all_materialized_view_objects_for_rewriting() throws MetaException { startFunction("get_all_materialized_view_objects_for_rewriting"); List<Table> ret = null; Exception ex = null; try { ret = getMS().getAllMaterializedViewObjectsForRewriting(DEFAULT_CATALOG_NAME); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_materialized_view_objects_for_rewriting", ret != null, ex); } return ret; } @Override public List<String> get_materialized_views_for_rewriting(final String dbname) throws MetaException { startFunction("get_materialized_views_for_rewriting", ": db=" + dbname); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { ret = getMS().getMaterializedViewsForRewriting(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_materialized_views_for_rewriting", ret != null, ex); } return ret; } @Override public List<String> get_all_tables(final String dbname) throws MetaException { startFunction("get_all_tables", ": db=" + dbname); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { ret = getMS().getAllTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_tables", ret != null, ex); } return ret; } /** * Use {@link #get_fields_req(GetFieldsRequest)} ()} instead. 
* */ @Override @Deprecated public List<FieldSchema> get_fields(String db, String tableName) throws MetaException, UnknownTableException, UnknownDBException { return get_fields_with_environment_context(db, tableName, null); }
@Override @Deprecated public List<FieldSchema> get_fields_with_environment_context(String db, String tableName, final EnvironmentContext envContext) throws MetaException, UnknownTableException, UnknownDBException { startFunction("get_fields_with_environment_context", ": db=" + db + " tbl=" + tableName); String[] names = tableName.split("\\."); String base_table_name = names[0]; String[] parsedDbName = parseDbName(db, conf); Table tbl; List<FieldSchema> ret = null; Exception ex = null; try { try { tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); firePreEvent(new PreReadTableEvent(tbl, this)); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage()); } if (null == tbl.getSd().getSerdeInfo().getSerializationLib() || MetastoreConf.getStringCollection(conf, ConfVars.SERDES_USING_METASTORE_FOR_SCHEMA).contains( tbl.getSd().getSerdeInfo().getSerializationLib())) { ret = tbl.getSd().getCols(); } else { StorageSchemaReader schemaReader = getStorageSchemaReader(); ret = schemaReader.readSchema(tbl, envContext, getConf()); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(UnknownTableException.class, MetaException.class).defaultMetaException(); } finally { endFunction("get_fields_with_environment_context", ret != null, ex, tableName); } return ret; }
@Override public GetFieldsResponse get_fields_req(GetFieldsRequest req) throws MetaException, UnknownTableException, UnknownDBException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<FieldSchema> fields = get_fields_with_environment_context( dbName, req.getTblName(), req.getEnvContext()); GetFieldsResponse res = new GetFieldsResponse(); res.setFields(fields); return res; }
private StorageSchemaReader getStorageSchemaReader() throws MetaException { if (storageSchemaReader == null) { String className = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.STORAGE_SCHEMA_READER_IMPL); Class<? extends StorageSchemaReader> readerClass = JavaUtils.getClass(className, StorageSchemaReader.class); try { storageSchemaReader = readerClass.newInstance(); } catch (InstantiationException | IllegalAccessException e) { LOG.error("Unable to instantiate class " + className, e); throw new MetaException(e.getMessage()); } } return storageSchemaReader; }
/** * Use {@link #get_schema_req(GetSchemaRequest)} instead. * */
@Override @Deprecated public List<FieldSchema> get_schema(String db, String tableName) throws MetaException, UnknownTableException, UnknownDBException { return get_schema_with_environment_context(db, tableName, null); }
/** * Return the schema of the table. This function includes partition columns * in addition to the regular columns.
* * @param db * Name of the database * @param tableName * Name of the table * @param envContext * Store session based properties * @return List of columns, each column is a FieldSchema structure * @throws MetaException * @throws UnknownTableException * @throws UnknownDBException */
@Override @Deprecated public List<FieldSchema> get_schema_with_environment_context(String db, String tableName, final EnvironmentContext envContext) throws MetaException, UnknownTableException, UnknownDBException { startFunction("get_schema_with_environment_context", ": db=" + db + " tbl=" + tableName); boolean success = false; Exception ex = null; try { String[] names = tableName.split("\\."); String base_table_name = names[0]; String[] parsedDbName = parseDbName(db, conf); Table tbl; try { tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage()); }
// Pass unparsed db name here
List<FieldSchema> fieldSchemas = get_fields_with_environment_context(db, base_table_name, envContext); if (tbl == null || fieldSchemas == null) { throw new UnknownTableException(tableName + " doesn't exist"); } if (tbl.getPartitionKeys() != null) {
// Combine the column field schemas and the partition keys to create the
// whole schema
fieldSchemas.addAll(tbl.getPartitionKeys()); } success = true; return fieldSchemas; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(UnknownDBException.class, UnknownTableException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("get_schema_with_environment_context", success, ex, tableName); } }
@Override public GetSchemaResponse get_schema_req(GetSchemaRequest req) throws MetaException, UnknownTableException, UnknownDBException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<FieldSchema> fields = get_schema_with_environment_context( dbName, req.getTblName(), req.getEnvContext()); GetSchemaResponse res = new GetSchemaResponse(); res.setFields(fields); return res; }
@Override public String getCpuProfile(int profileDurationInSec) throws TException { return ""; }
/** * Returns the value of the given configuration variable name. If the * configuration variable with the given name doesn't exist, or if an * exception was thrown while retrieving the variable, or if name is * null, defaultValue is returned. */
@Override public String get_config_value(String name, String defaultValue) throws TException { startFunction("get_config_value", ": name=" + name + " defaultValue=" + defaultValue); boolean success = false; Exception ex = null; try { if (name == null) { success = true; return defaultValue; }
// Allow only keys that start with hive.*, hdfs.*, mapred.* for security
// i.e. don't allow access to db password
if (!Pattern.matches("(hive|hdfs|mapred|metastore).*", name)) { throw new ConfigValSecurityException("For security reasons, the " + "config key " + name + " cannot be accessed"); } String toReturn = defaultValue; try { toReturn = MetastoreConf.get(conf, name); if (toReturn == null) { toReturn = defaultValue; } } catch (RuntimeException e) { LOG.error(threadLocalId.get().toString() + ": " + "RuntimeException thrown in get_config_value - msg: " + e.getMessage() + " cause: " + e.getCause()); } success = true; return toReturn; } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(TException.class).defaultMetaException(); } finally { endFunction("get_config_value", success, ex); } }
public static List<String> getPartValsFromName(Table t, String partName) throws MetaException, InvalidObjectException { Preconditions.checkArgument(t != null, "Table can not be null");
// Unescape the partition name
LinkedHashMap<String, String> hm = Warehouse.makeSpecFromName(partName); List<String> partVals = new ArrayList<>(); for (FieldSchema field : t.getPartitionKeys()) { String key = field.getName(); String val = hm.get(key); if (val == null) { throw new InvalidObjectException("incomplete partition name - missing " + key); } partVals.add(val); } return partVals; }
private List<String> getPartValsFromName(RawStore ms, String catName, String dbName, String tblName, String partName) throws MetaException, InvalidObjectException { Table t = ms.getTable(catName, dbName, tblName, null); if (t == null) { throw new InvalidObjectException(dbName + "." + tblName + " table not found"); } return getPartValsFromName(t, partName); }
private Partition get_partition_by_name_core(final RawStore ms, final String catName, final String db_name, final String tbl_name, final String part_name) throws TException { fireReadTablePreEvent(catName, db_name, tbl_name); List<String> partVals; try { partVals = getPartValsFromName(ms, catName, db_name, tbl_name, part_name); } catch (InvalidObjectException e) { throw new NoSuchObjectException(e.getMessage()); } Partition p = ms.getPartition(catName, db_name, tbl_name, partVals); p = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, p); if (p == null) { throw new NoSuchObjectException(TableName.getQualified(catName, db_name, tbl_name) + " partition (" + part_name + ") not found"); } return p; }
@Override @Deprecated public Partition get_partition_by_name(final String db_name, final String tbl_name, final String part_name) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("get_partition_by_name", ": tbl=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); Partition ret = null; Exception ex = null; try { ret = get_partition_by_name_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_name); ret = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partition_by_name", ret != null, ex, tbl_name); } return ret; }
@Override public Partition append_partition_by_name(final String db_name, final String tbl_name, final String part_name) throws TException { return append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, null); }
@Override public Partition append_partition_by_name_with_environment_context(final String db_name, final String tbl_name, final String part_name, final
EnvironmentContext env_context) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("append_partition_by_name", ": tbl=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); Partition ret = null; Exception ex = null; try { RawStore ms = getMS(); List<String> partVals = getPartValsFromName(ms, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_name); ret = append_partition_common(ms, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partVals, env_context); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(InvalidObjectException.class, AlreadyExistsException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("append_partition_by_name", ret != null, ex, tbl_name); } return ret; }
private boolean drop_partition_by_name_core(final RawStore ms, final String catName, final String db_name, final String tbl_name, final String part_name, final boolean deleteData, final EnvironmentContext envContext) throws TException, IOException { List<String> partVals; try { partVals = getPartValsFromName(ms, catName, db_name, tbl_name, part_name); } catch (InvalidObjectException e) { throw new NoSuchObjectException(e.getMessage()); } return drop_partition_common(ms, catName, db_name, tbl_name, partVals, deleteData, envContext); }
@Override public boolean drop_partition_by_name(final String db_name, final String tbl_name, final String part_name, final boolean deleteData) throws TException { return drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, null); }
@Override public boolean drop_partition_by_name_with_environment_context(final String db_name, final String tbl_name, final String part_name, final boolean deleteData, final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("drop_partition_by_name", ": tbl=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); boolean ret = false; Exception ex = null; try { ret = drop_partition_by_name_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_name, deleteData, envContext); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(IOException.class, MetaException.class).rethrowException(e); } finally { endFunction("drop_partition_by_name", ret, ex, tbl_name); } return ret; }
@Override @Deprecated public List<Partition> get_partitions_ps(final String db_name, final String tbl_name, final List<String> part_vals, final short max_parts) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partitions_ps", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); List<Partition> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name);
// Don't send the parsedDbName, as this method will parse itself.
ret = get_partitions_ps_with_auth(db_name, tbl_name, part_vals, max_parts, null, null); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_ps", ret != null, ex, tbl_name); } return ret; }
/** * Use {@link #get_partitions_ps_with_auth_req(GetPartitionsPsWithAuthRequest)} instead.
* */ @Override @Deprecated public List<Partition> get_partitions_ps_with_auth(final String db_name, final String tbl_name, final List<String> part_vals, final short max_parts, final String userName, final List<String> groupNames) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partitions_ps_with_auth", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<Partition> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().listPartitionsPsWithAuth(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, max_parts, userName, groupNames); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(InvalidObjectException.class, MetaException.class).rethrowException(e); } finally { endFunction("get_partitions_ps_with_auth", ret != null, ex, tbl_name); } return ret; } @Override public GetPartitionsPsWithAuthResponse get_partitions_ps_with_auth_req(GetPartitionsPsWithAuthRequest req) throws MetaException, NoSuchObjectException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<Partition> partitions = null; if (req.getPartVals() == null) { partitions = get_partitions_with_auth(dbName, req.getTblName(), req.getMaxParts(), req.getUserName(), req.getGroupNames()); } else { partitions = get_partitions_ps_with_auth(dbName, req.getTblName(), req.getPartVals(), req.getMaxParts(), req.getUserName(), req.getGroupNames()); } GetPartitionsPsWithAuthResponse res = new GetPartitionsPsWithAuthResponse(); res.setPartitions(partitions); return res; } /** * Use {@link #get_partition_names_ps_req(GetPartitionNamesPsRequest)} ()} instead. 
* */ @Override @Deprecated public List<String> get_partition_names_ps(final String db_name, final String tbl_name, final List<String> part_vals, final short max_parts) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partitions_names_ps", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<String> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().listPartitionNamesPs(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, max_parts); ret = FilterUtils.filterPartitionNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_names_ps", ret != null, ex, tbl_name); } return ret; } @Override public GetPartitionNamesPsResponse get_partition_names_ps_req(GetPartitionNamesPsRequest req) throws MetaException, NoSuchObjectException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<String> names = get_partition_names_ps(dbName, req.getTblName(), req.getPartValues(), req.getMaxParts()); GetPartitionNamesPsResponse res = new GetPartitionNamesPsResponse(); res.setNames(names); return res; } @Override public List<String> get_partition_names_req(PartitionsByExprRequest req) throws MetaException, NoSuchObjectException, TException { String catName = req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf); String dbName = req.getDbName(), tblName = req.getTblName(); startTableFunction("get_partition_names_req", catName, dbName, tblName); fireReadTablePreEvent(catName, dbName, tblName); List<String> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(catName, dbName, tblName); ret = getMS().listPartitionNames(catName, dbName, tblName, req.getDefaultPartitionName(), req.getExpr(), req.getOrder(), req.getMaxParts()); ret = FilterUtils.filterPartitionNamesIfEnabled(isServerFilterEnabled, filterHook, catName, dbName, tblName, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partition_names_req", ret != null, ex, tblName); } return ret; } @Override public List<String> partition_name_to_vals(String part_name) throws TException { if (part_name.length() == 0) { return Collections.emptyList(); } LinkedHashMap<String, String> map = Warehouse.makeSpecFromName(part_name); return new ArrayList<>(map.values()); } @Override public Map<String, String> partition_name_to_spec(String part_name) throws TException { if (part_name.length() == 0) { return new HashMap<>(); } return Warehouse.makeSpecFromName(part_name); } public static String lowerCaseConvertPartName(String partName) throws MetaException { if (partName == null) { return partName; } boolean isFirst = true; Map<String, String> partSpec = Warehouse.makeEscSpecFromName(partName); String convertedPartName = new String(); for (Map.Entry<String, String> entry : partSpec.entrySet()) { String partColName = entry.getKey(); String partColVal = entry.getValue(); if (!isFirst) { convertedPartName += "/"; } else { isFirst = false; } convertedPartName += partColName.toLowerCase() + "=" + partColVal; } return convertedPartName; } @Override @Deprecated public ColumnStatistics get_table_column_statistics(String dbName, String tableName, String 
colName) throws TException { String[] parsedDbName = parseDbName(dbName, conf); parsedDbName[CAT_NAME] = parsedDbName[CAT_NAME].toLowerCase(); parsedDbName[DB_NAME] = parsedDbName[DB_NAME].toLowerCase(); tableName = tableName.toLowerCase(); colName = colName.toLowerCase(); startFunction("get_column_statistics_by_table", ": table=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " column=" + colName); ColumnStatistics statsObj = null; try { statsObj = getMS().getTableColumnStatistics( parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, Lists.newArrayList(colName), "hive", null); if (statsObj != null) { assert statsObj.getStatsObjSize() <= 1; } return statsObj; } finally { endFunction("get_column_statistics_by_table", statsObj != null, null, tableName); } }
@Override public TableStatsResult get_table_statistics_req(TableStatsRequest request) throws TException { String catName = request.isSetCatName() ? request.getCatName().toLowerCase() : getDefaultCatalog(conf); String dbName = request.getDbName().toLowerCase(); String tblName = request.getTblName().toLowerCase(); startFunction("get_table_statistics_req", ": table=" + TableName.getQualified(catName, dbName, tblName)); TableStatsResult result = null; List<String> lowerCaseColNames = new ArrayList<>(request.getColNames().size()); for (String colName : request.getColNames()) { lowerCaseColNames.add(colName.toLowerCase()); } try { ColumnStatistics cs = getMS().getTableColumnStatistics( catName, dbName, tblName, lowerCaseColNames, request.getEngine(), request.getValidWriteIdList());
// Note: stats compliance is not propagated to the client; instead, we just return nothing
// if stats are not compliant for now. This won't work for stats merging, but that
// is currently only done on metastore side (see set_aggr...).
// For some optimizations we might make use of incorrect stats that are "better than
// nothing", so this may change in future.
result = new TableStatsResult((cs == null || cs.getStatsObj() == null || (cs.isSetIsStatsCompliant() && !cs.isIsStatsCompliant())) ? Lists.newArrayList() : cs.getStatsObj()); } finally { endFunction("get_table_statistics_req", result != null, null, tblName); } return result; }
@Override @Deprecated public ColumnStatistics get_partition_column_statistics(String dbName, String tableName, String partName, String colName) throws TException {
// Note: this method appears to be unused within Hive.
// It doesn't take txn stats into account.
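// Normalization example (illustrative): lowerCaseConvertPartName lower-cases the partition
// key names but keeps the values as-is, e.g. "DS=2023-01-01/Region=EU" becomes
// "ds=2023-01-01/Region=EU".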
dbName = dbName.toLowerCase(); String[] parsedDbName = parseDbName(dbName, conf); tableName = tableName.toLowerCase(); colName = colName.toLowerCase(); String convertedPartName = lowerCaseConvertPartName(partName); startFunction("get_column_statistics_by_partition", ": table=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " partition=" + convertedPartName + " column=" + colName); ColumnStatistics statsObj = null; try { List<ColumnStatistics> list = getMS().getPartitionColumnStatistics( parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, Lists.newArrayList(convertedPartName), Lists.newArrayList(colName), "hive"); if (list.isEmpty()) { return null; } if (list.size() != 1) { throw new MetaException(list.size() + " statistics found for a single column and partition"); } statsObj = list.get(0); } finally { endFunction("get_column_statistics_by_partition", statsObj != null, null, tableName); } return statsObj; }
@Override public PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request) throws TException { String catName = request.isSetCatName() ? request.getCatName().toLowerCase() : getDefaultCatalog(conf); String dbName = request.getDbName().toLowerCase(); String tblName = request.getTblName().toLowerCase(); startFunction("get_partitions_statistics_req", ": table=" + TableName.getQualified(catName, dbName, tblName)); PartitionsStatsResult result = null; List<String> lowerCaseColNames = new ArrayList<>(request.getColNames().size()); for (String colName : request.getColNames()) { lowerCaseColNames.add(colName.toLowerCase()); } List<String> lowerCasePartNames = new ArrayList<>(request.getPartNames().size()); for (String partName : request.getPartNames()) { lowerCasePartNames.add(lowerCaseConvertPartName(partName)); } try { List<ColumnStatistics> stats = getMS().getPartitionColumnStatistics( catName, dbName, tblName, lowerCasePartNames, lowerCaseColNames, request.getEngine(), request.isSetValidWriteIdList() ? request.getValidWriteIdList() : null); Map<String, List<ColumnStatisticsObj>> map = new HashMap<>(); if (stats != null) { for (ColumnStatistics stat : stats) {
// Note: stats compliance is not propagated to the client; instead, we just return nothing
// if stats are not compliant for now. This won't work for stats merging, but that
// is currently only done on metastore side (see set_aggr...).
// For some optimizations we might make use of incorrect stats that are "better than
// nothing", so this may change in future.
if (stat.isSetIsStatsCompliant() && !stat.isIsStatsCompliant()) { continue; } map.put(stat.getStatsDesc().getPartName(), stat.getStatsObj()); } } result = new PartitionsStatsResult(map); } finally { endFunction("get_partitions_statistics_req", result != null, null, tblName); } return result; }
@Override public boolean update_table_column_statistics(ColumnStatistics colStats) throws TException {
// Deprecated API, won't work for transactional tables
return updateTableColumnStatsInternal(colStats, null, -1); }
@Override public SetPartitionsStatsResponse update_table_column_statistics_req( SetPartitionsStatsRequest req) throws NoSuchObjectException, InvalidObjectException, MetaException, InvalidInputException, TException { if (req.getColStatsSize() != 1) { throw new InvalidInputException("Only one stats object expected"); } if (req.isNeedMerge()) { throw new InvalidInputException("Merge is not supported for non-aggregate stats"); } ColumnStatistics colStats = req.getColStatsIterator().next(); boolean ret = updateTableColumnStatsInternal(colStats, req.getValidWriteIdList(), req.getWriteId()); return new SetPartitionsStatsResponse(ret); }
private boolean updateTableColumnStatsInternal(ColumnStatistics colStats, String validWriteIds, long writeId) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { normalizeColStatsInput(colStats); startFunction("write_column_statistics", ": table=" + TableName.getQualified( colStats.getStatsDesc().getCatName(), colStats.getStatsDesc().getDbName(), colStats.getStatsDesc().getTableName())); Map<String, String> parameters = null; getMS().openTransaction(); boolean committed = false; try { parameters = getMS().updateTableColumnStatistics(colStats, validWriteIds, writeId); if (parameters != null) { Table tableObj = getMS().getTable(colStats.getStatsDesc().getCatName(), colStats.getStatsDesc().getDbName(), colStats.getStatsDesc().getTableName(), validWriteIds); if (transactionalListeners != null && !transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.UPDATE_TABLE_COLUMN_STAT, new UpdateTableColumnStatEvent(colStats, tableObj, parameters, writeId, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.UPDATE_TABLE_COLUMN_STAT, new UpdateTableColumnStatEvent(colStats, tableObj, parameters, writeId, this)); } } committed = getMS().commitTransaction(); } finally { if (!committed) { getMS().rollbackTransaction(); } endFunction("write_column_statistics", parameters != null, null, colStats.getStatsDesc().getTableName()); } return parameters != null; }
private void normalizeColStatsInput(ColumnStatistics colStats) throws MetaException {
// TODO: is this really needed? this code is propagated from HIVE-1362 but most of it is useless.
ColumnStatisticsDesc statsDesc = colStats.getStatsDesc(); statsDesc.setCatName(statsDesc.isSetCatName() ?
statsDesc.getCatName().toLowerCase() : getDefaultCatalog(conf)); statsDesc.setDbName(statsDesc.getDbName().toLowerCase()); statsDesc.setTableName(statsDesc.getTableName().toLowerCase()); statsDesc.setPartName(lowerCaseConvertPartName(statsDesc.getPartName())); long time = System.currentTimeMillis() / 1000; statsDesc.setLastAnalyzed(time); for (ColumnStatisticsObj statsObj : colStats.getStatsObj()) { statsObj.setColName(statsObj.getColName().toLowerCase()); statsObj.setColType(statsObj.getColType().toLowerCase()); } colStats.setStatsDesc(statsDesc); }
private boolean updatePartitonColStatsInternal(Table tbl, ColumnStatistics colStats, String validWriteIds, long writeId) throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { normalizeColStatsInput(colStats); ColumnStatisticsDesc csd = colStats.getStatsDesc(); String catName = csd.getCatName(), dbName = csd.getDbName(), tableName = csd.getTableName(); startFunction("write_partition_column_statistics", ": db=" + dbName + " table=" + tableName + " part=" + csd.getPartName()); Map<String, String> parameters = null; List<String> partVals; boolean committed = false; getMS().openTransaction(); try { if (tbl == null) { tbl = getTable(catName, dbName, tableName); } partVals = getPartValsFromName(tbl, csd.getPartName()); parameters = getMS().updatePartitionColumnStatistics(colStats, partVals, validWriteIds, writeId); if (parameters != null) { if (transactionalListeners != null && !transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.UPDATE_PARTITION_COLUMN_STAT, new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.UPDATE_PARTITION_COLUMN_STAT, new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, this)); } } committed = getMS().commitTransaction(); } finally { if (!committed) { getMS().rollbackTransaction(); } endFunction("write_partition_column_statistics", parameters != null, null, tableName); } return parameters != null; }
private void updatePartitionColStatsForOneBatch(Table tbl, Map<String, ColumnStatistics> statsMap, String validWriteIds, long writeId) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { Map<String, Map<String, String>> result = getMS().updatePartitionColumnStatisticsInBatch(statsMap, tbl, transactionalListeners, validWriteIds, writeId); if (result != null && result.size() != 0 && listeners != null) {
// The normal listeners, unlike transaction listeners are not using the same transactions used by the update
// operations. So there is no need of keeping them within the same transactions. If notification to one of
// the listeners failed, then even if we abort the transaction, we can not revert the notifications sent to the
// other listeners.
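// result maps partition name -> updated table/partition parameters; each key is looked up in
// statsMap below to recover the ColumnStatistics that produced it for the listener event.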
for (Map.Entry<String, Map<String, String>> entry : result.entrySet()) { Map<String, String> parameters = entry.getValue(); ColumnStatistics colStats = statsMap.get(entry.getKey()); List<String> partVals = getPartValsFromName(tbl, colStats.getStatsDesc().getPartName()); MetaStoreListenerNotifier.notifyEvent(listeners, EventMessage.EventType.UPDATE_PARTITION_COLUMN_STAT, new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, this)); } } }
private boolean updatePartitionColStatsInBatch(Table tbl, Map<String, ColumnStatistics> statsMap, String validWriteIds, long writeId) throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { if (statsMap.isEmpty()) { return false; } String catalogName = tbl.getCatName(); String dbName = tbl.getDbName(); String tableName = tbl.getTableName(); startFunction("updatePartitionColStatsInBatch", ": db=" + dbName + " table=" + tableName); long start = System.currentTimeMillis(); Map<String, ColumnStatistics> newStatsMap = new HashMap<>(); long numStats = 0; long numStatsMax = MetastoreConf.getIntVar(conf, ConfVars.JDBC_MAX_BATCH_SIZE); try { for (Map.Entry<String, ColumnStatistics> entry : statsMap.entrySet()) { ColumnStatistics colStats = entry.getValue(); normalizeColStatsInput(colStats); assert catalogName.equalsIgnoreCase(colStats.getStatsDesc().getCatName()); assert dbName.equalsIgnoreCase(colStats.getStatsDesc().getDbName()); assert tableName.equalsIgnoreCase(colStats.getStatsDesc().getTableName()); newStatsMap.put(entry.getKey(), colStats); numStats += colStats.getStatsObjSize(); if (newStatsMap.size() >= numStatsMax) { updatePartitionColStatsForOneBatch(tbl, newStatsMap, validWriteIds, writeId); newStatsMap.clear(); numStats = 0; } } if (numStats != 0) { updatePartitionColStatsForOneBatch(tbl, newStatsMap, validWriteIds, writeId); } } finally { endFunction("updatePartitionColStatsInBatch", true, null, tableName); long end = System.currentTimeMillis(); float sec = (end - start) / 1000F; LOG.info("updatePartitionColStatsInBatch took " + sec + " seconds for " + statsMap.size() + " stats"); } return true; }
@Override public boolean update_partition_column_statistics(ColumnStatistics colStats) throws TException { // Deprecated API.
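// Passing a null writeIdList and writeId -1 skips transactional-stats bookkeeping, which is
// why this deprecated entry point is unsuitable for transactional tables.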
return updatePartitonColStatsInternal(null, colStats, null, -1); }
@Override public SetPartitionsStatsResponse update_partition_column_statistics_req( SetPartitionsStatsRequest req) throws NoSuchObjectException, InvalidObjectException, MetaException, InvalidInputException, TException { if (req.getColStatsSize() != 1) { throw new InvalidInputException("Only one stats object expected"); } if (req.isNeedMerge()) { throw new InvalidInputException("Merge is not supported for non-aggregate stats"); } ColumnStatistics colStats = req.getColStatsIterator().next(); boolean ret = updatePartitonColStatsInternal(null, colStats, req.getValidWriteIdList(), req.getWriteId()); return new SetPartitionsStatsResponse(ret); }
@Override public boolean delete_partition_column_statistics(String dbName, String tableName, String partName, String colName, String engine) throws TException { dbName = dbName.toLowerCase(); String[] parsedDbName = parseDbName(dbName, conf); tableName = tableName.toLowerCase(); if (colName != null) { colName = colName.toLowerCase(); } String convertedPartName = lowerCaseConvertPartName(partName); startFunction("delete_column_statistics_by_partition", ": table=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " partition=" + convertedPartName + " column=" + colName); boolean ret = false, committed = false; getMS().openTransaction(); try { List<String> partVals = getPartValsFromName(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName); Table table = getMS().getTable(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName);
// This API looks unused; if it were used we'd need to update stats state and write ID.
// We cannot just randomly nuke some txn stats.
if (TxnUtils.isTransactionalTable(table)) { throw new MetaException("Cannot delete stats via this API for a transactional table"); } ret = getMS().deletePartitionColumnStatistics(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName, partVals, colName, engine); if (ret) { if (transactionalListeners != null && !transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DELETE_PARTITION_COLUMN_STAT, new DeletePartitionColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName, partVals, colName, engine, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DELETE_PARTITION_COLUMN_STAT, new DeletePartitionColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName, partVals, colName, engine, this)); } } committed = getMS().commitTransaction(); } finally { if (!committed) { getMS().rollbackTransaction(); } endFunction("delete_column_statistics_by_partition", ret, null, tableName); } return ret; }
@Override public boolean delete_table_column_statistics(String dbName, String tableName, String colName, String engine) throws TException { dbName = dbName.toLowerCase(); tableName = tableName.toLowerCase(); String[] parsedDbName = parseDbName(dbName, conf); if (colName != null) { colName = colName.toLowerCase(); } startFunction("delete_column_statistics_by_table", ": table=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " column=" + colName); boolean ret = false, committed = false; getMS().openTransaction(); try { Table table = getMS().getTable(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName);
// This API looks unused; if it were used we'd need to update stats state and write ID.
// We cannot just randomly nuke some txn stats.
if (TxnUtils.isTransactionalTable(table)) { throw new MetaException("Cannot delete stats via this API for a transactional table"); } ret = getMS().deleteTableColumnStatistics(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, colName, engine); if (ret) { if (transactionalListeners != null && !transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DELETE_TABLE_COLUMN_STAT, new DeleteTableColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, colName, engine, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DELETE_TABLE_COLUMN_STAT, new DeleteTableColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, colName, engine, this)); } } committed = getMS().commitTransaction(); } finally { if (!committed) { getMS().rollbackTransaction(); } endFunction("delete_column_statistics_by_table", ret, null, tableName); } return ret; }
@Override @Deprecated public List<Partition> get_partitions_by_filter(final String dbName, final String tblName, final String filter, final short maxParts) throws TException { String[] parsedDbName = parseDbName(dbName, conf); startTableFunction("get_partitions_by_filter", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); List<Partition> ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, filter, maxParts); authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); ret = getMS().getPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, filter, maxParts); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_by_filter", ret != null, ex, tblName); } return ret; }
@Override @Deprecated public List<PartitionSpec> get_part_specs_by_filter(final String dbName, final String tblName, final String filter, final int maxParts) throws TException { String[] parsedDbName = parseDbName(dbName, conf); startTableFunction("get_partitions_by_filter_pspec", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); List<PartitionSpec> partitionSpecs = null; try { Table table = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName);
// Don't pass the parsed db name, as get_partitions_by_filter will parse it itself
List<Partition> partitions = get_partitions_by_filter(dbName, tblName, filter, (short) maxParts); if (is_partition_spec_grouping_enabled(table)) { partitionSpecs = MetaStoreServerUtils .getPartitionspecsGroupedByStorageDescriptor(table, partitions); } else { PartitionSpec pSpec = new PartitionSpec(); pSpec.setPartitionList(new PartitionListComposingSpec(partitions)); pSpec.setRootPath(table.getSd().getLocation()); pSpec.setCatName(parsedDbName[CAT_NAME]); pSpec.setDbName(parsedDbName[DB_NAME]); pSpec.setTableName(tblName); partitionSpecs = Arrays.asList(pSpec); } return partitionSpecs; } finally { endFunction("get_partitions_by_filter_pspec", partitionSpecs != null && !partitionSpecs.isEmpty(), null, tblName); } }
@Override public PartitionsSpecByExprResult get_partitions_spec_by_expr( PartitionsByExprRequest req) throws TException { String dbName = req.getDbName(), tblName = req.getTblName(); String
catName = req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf); startTableFunction("get_partitions_spec_by_expr", catName, dbName, tblName); fireReadTablePreEvent(catName, dbName, tblName); PartitionsSpecByExprResult ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByExpr(catName, dbName, tblName, req.getExpr(), UNLIMITED_MAX_PARTITIONS); List<Partition> partitions = new LinkedList<>(); boolean hasUnknownPartitions = getMS().getPartitionsByExpr(catName, dbName, tblName, req.getExpr(), req.getDefaultPartitionName(), req.getMaxParts(), partitions); Table table = get_table_core(catName, dbName, tblName); List<PartitionSpec> partitionSpecs = MetaStoreServerUtils.getPartitionspecsGroupedByStorageDescriptor(table, partitions); ret = new PartitionsSpecByExprResult(partitionSpecs, hasUnknownPartitions); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_spec_by_expr", ret != null, ex, tblName); } return ret; } @Override public PartitionsByExprResult get_partitions_by_expr( PartitionsByExprRequest req) throws TException { String dbName = req.getDbName(), tblName = req.getTblName(); String catName = req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf); startTableFunction("get_partitions_by_expr", catName, dbName, tblName); fireReadTablePreEvent(catName, dbName, tblName); PartitionsByExprResult ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByExpr(catName, dbName, tblName, req.getExpr(), UNLIMITED_MAX_PARTITIONS); List<Partition> partitions = new LinkedList<>(); boolean hasUnknownPartitions = getMS().getPartitionsByExpr(catName, dbName, tblName, req.getExpr(), req.getDefaultPartitionName(), req.getMaxParts(), partitions); ret = new PartitionsByExprResult(partitions, hasUnknownPartitions); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_by_expr", ret != null, ex, tblName); } return ret; } @Override @Deprecated public int get_num_partitions_by_filter(final String dbName, final String tblName, final String filter) throws TException { String[] parsedDbName = parseDbName(dbName, conf); if (parsedDbName[DB_NAME] == null || tblName == null) { throw new MetaException("The DB and table name cannot be null."); } startTableFunction("get_num_partitions_by_filter", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); int ret = -1; Exception ex = null; try { ret = getMS().getNumPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, filter); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_num_partitions_by_filter", ret != -1, ex, tblName); } return ret; } private int get_num_partitions_by_expr(final String catName, final String dbName, final String tblName, final byte[] expr) throws TException { int ret = -1; Exception ex = null; try { ret = getMS().getNumPartitionsByExpr(catName, dbName, tblName, expr); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_num_partitions_by_expr", ret != -1, ex, tblName); } return ret; } @Override @Deprecated public List<Partition> get_partitions_by_names(final String dbName, final String tblName, final List<String> partNames) throws TException { return get_partitions_by_names(dbName, tblName, partNames, false, null, null); } @Override public GetPartitionsByNamesResult get_partitions_by_names_req(GetPartitionsByNamesRequest gpbnr) throws TException { List<Partition> partitions = get_partitions_by_names(gpbnr.getDb_name(), gpbnr.getTbl_name(), 
gpbnr.getNames(), gpbnr.isSetGet_col_stats() && gpbnr.isGet_col_stats(), gpbnr.getEngine(), gpbnr.getProcessorCapabilities(), gpbnr.getProcessorIdentifier()); return new GetPartitionsByNamesResult(partitions); } public List<Partition> get_partitions_by_names(final String dbName, final String tblName, final List<String> partNames, boolean getColStats, String engine, String validWriteIdList) throws TException { return get_partitions_by_names( dbName, tblName, partNames, getColStats, engine, null, null); } public List<Partition> get_partitions_by_names(final String dbName, final String tblName, final List<String> partNames, boolean getColStats, String engine, List<String> processorCapabilities, String processorId) throws TException { String[] dbNameParts = parseDbName(dbName, conf); String parsedCatName = dbNameParts[CAT_NAME]; String parsedDbName = dbNameParts[DB_NAME]; List<Partition> ret = null; Table table = null; Exception ex = null; boolean success = false; startTableFunction("get_partitions_by_names", parsedCatName, parsedDbName, tblName); try { getMS().openTransaction(); authorizeTableForPartitionMetadata(parsedCatName, parsedDbName, tblName); fireReadTablePreEvent(parsedCatName, parsedDbName, tblName); ret = getMS().getPartitionsByNames(parsedCatName, parsedDbName, tblName, partNames); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); table = getTable(parsedCatName, parsedDbName, tblName); // If requested add column statistics in each of the partition objects if (getColStats) { // Since each partition may have stats collected for different set of columns, we // request them separately. for (Partition part: ret) { String partName = Warehouse.makePartName(table.getPartitionKeys(), part.getValues()); List<ColumnStatistics> partColStatsList = getMS().getPartitionColumnStatistics(parsedCatName, parsedDbName, tblName, Collections.singletonList(partName), StatsSetupConst.getColumnsHavingStats(part.getParameters()), engine); if (partColStatsList != null && !partColStatsList.isEmpty()) { ColumnStatistics partColStats = partColStatsList.get(0); if (partColStats != null) { part.setColStats(partColStats); } } } } if (processorCapabilities == null || processorCapabilities.size() == 0 || processorCapabilities.contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + processorId); } else { if (transformer != null) { ret = transformer.transformPartitions(ret, table, processorCapabilities, processorId); } } success = getMS().commitTransaction(); } catch (Exception e) { ex = e; rethrowException(e); } finally { if (!success) { getMS().rollbackTransaction(); } endFunction("get_partitions_by_names", ret != null, ex, tblName); } return ret; } @Override public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String userName, List<String> groupNames) throws TException { firePreEvent(new PreAuthorizationCallEvent(this)); String catName = hiveObject.isSetCatName() ? 
hiveObject.getCatName() : getDefaultCatalog(conf);
    if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
      String partName = getPartName(hiveObject);
      return this.get_column_privilege_set(catName, hiveObject.getDbName(),
          hiveObject.getObjectName(), partName, hiveObject.getColumnName(), userName, groupNames);
    } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
      String partName = getPartName(hiveObject);
      return this.get_partition_privilege_set(catName, hiveObject.getDbName(),
          hiveObject.getObjectName(), partName, userName, groupNames);
    } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
      return this.get_db_privilege_set(catName, hiveObject.getDbName(), userName, groupNames);
    } else if (hiveObject.getObjectType() == HiveObjectType.DATACONNECTOR) {
      return this.get_connector_privilege_set(catName, hiveObject.getObjectName(), userName, groupNames);
    } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
      return this.get_table_privilege_set(catName, hiveObject.getDbName(),
          hiveObject.getObjectName(), userName, groupNames);
    } else if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
      return this.get_user_privilege_set(userName, groupNames);
    }
    return null;
  }

  private String getPartName(HiveObjectRef hiveObject) throws MetaException {
    String partName = null;
    List<String> partValue = hiveObject.getPartValues();
    if (partValue != null && partValue.size() > 0) {
      try {
        String catName = hiveObject.isSetCatName() ? hiveObject.getCatName() :
            getDefaultCatalog(conf);
        Table table = get_table_core(catName, hiveObject.getDbName(), hiveObject.getObjectName());
        partName = Warehouse.makePartName(table.getPartitionKeys(), partValue);
      } catch (NoSuchObjectException e) {
        throw new MetaException(e.getMessage());
      }
    }
    return partName;
  }

  private PrincipalPrivilegeSet get_column_privilege_set(String catName, final String dbName,
      final String tableName, final String partName, final String columnName,
      final String userName, final List<String> groupNames) throws TException {
    incrementCounter("get_column_privilege_set");
    PrincipalPrivilegeSet ret;
    try {
      ret = getMS().getColumnPrivilegeSet(catName, dbName, tableName, partName, columnName,
          userName, groupNames);
    } catch (Exception e) {
      throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException();
    }
    return ret;
  }

  private PrincipalPrivilegeSet get_db_privilege_set(String catName, final String dbName,
      final String userName, final List<String> groupNames) throws TException {
    incrementCounter("get_db_privilege_set");
    PrincipalPrivilegeSet ret;
    try {
      ret = getMS().getDBPrivilegeSet(catName, dbName, userName, groupNames);
    } catch (Exception e) {
      throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException();
    }
    return ret;
  }

  private PrincipalPrivilegeSet get_connector_privilege_set(String catName,
      final String connectorName, final String userName, final List<String> groupNames)
      throws TException {
    incrementCounter("get_connector_privilege_set");
    PrincipalPrivilegeSet ret;
    try {
      ret = getMS().getConnectorPrivilegeSet(catName, connectorName, userName, groupNames);
    } catch (MetaException e) {
      throw e;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    return ret;
  }

  private PrincipalPrivilegeSet get_partition_privilege_set(
      String catName, final String dbName, final String tableName, final String partName,
      final String userName, final List<String> groupNames) throws TException {
    incrementCounter("get_partition_privilege_set");
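    // Hedged, caller-side sketch (hypothetical names; not part of this class): the
    // get_privilege_set dispatcher above routes by HiveObjectType. A column-level
    // lookup, for example, could be issued like this:
    //
    //   HiveObjectRef ref = new HiveObjectRef(HiveObjectType.COLUMN, "default", "sales",
    //       Arrays.asList("2021-01-01"), "revenue");  // db, table, partition values, column
    //   PrincipalPrivilegeSet privs =
    //       handler.get_privilege_set(ref, "alice", Collections.singletonList("analysts"));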
PrincipalPrivilegeSet ret; try { ret = getMS().getPartitionPrivilegeSet(catName, dbName, tableName, partName, userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } private PrincipalPrivilegeSet get_table_privilege_set(String catName, final String dbName, final String tableName, final String userName, final List<String> groupNames) throws TException { incrementCounter("get_table_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getTablePrivilegeSet(catName, dbName, tableName, userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } @Override public boolean grant_role(final String roleName, final String principalName, final PrincipalType principalType, final String grantor, final PrincipalType grantorType, final boolean grantOption) throws TException { incrementCounter("add_role_member"); firePreEvent(new PreAuthorizationCallEvent(this)); if (PUBLIC.equals(roleName)) { throw new MetaException("No user can be added to " + PUBLIC +". Since all users implicitly" + " belong to " + PUBLIC + " role."); } Boolean ret; try { RawStore ms = getMS(); Role role = ms.getRole(roleName); if(principalType == PrincipalType.ROLE){ //check if this grant statement will end up creating a cycle if(isNewRoleAParent(principalName, roleName)){ throw new MetaException("Cannot grant role " + principalName + " to " + roleName + " as " + roleName + " already belongs to the role " + principalName + ". (no cycles allowed)"); } } ret = ms.grantRole(role, principalName, principalType, grantor, grantorType, grantOption); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } /** * Check if newRole is in parent hierarchy of curRole * @param newRole * @param curRole * @return true if newRole is curRole or present in its hierarchy * @throws MetaException */ private boolean isNewRoleAParent(String newRole, String curRole) throws MetaException { if(newRole.equals(curRole)){ return true; } //do this check recursively on all the parent roles of curRole List<Role> parentRoleMaps = getMS().listRoles(curRole, PrincipalType.ROLE); for(Role parentRole : parentRoleMaps){ if(isNewRoleAParent(newRole, parentRole.getRoleName())){ return true; } } return false; } @Override public List<Role> list_roles(final String principalName, final PrincipalType principalType) throws TException { incrementCounter("list_roles"); firePreEvent(new PreAuthorizationCallEvent(this)); return getMS().listRoles(principalName, principalType); } @Override public boolean create_role(final Role role) throws TException { incrementCounter("create_role"); firePreEvent(new PreAuthorizationCallEvent(this)); if (PUBLIC.equals(role.getRoleName())) { throw new MetaException(PUBLIC + " role implicitly exists. 
It can't be created."); } Boolean ret; try { ret = getMS().addRole(role.getRoleName(), role.getOwnerName()); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public boolean drop_role(final String roleName) throws TException { incrementCounter("drop_role"); firePreEvent(new PreAuthorizationCallEvent(this)); if (ADMIN.equals(roleName) || PUBLIC.equals(roleName)) { throw new MetaException(PUBLIC + "," + ADMIN + " roles can't be dropped."); } Boolean ret; try { ret = getMS().removeRole(roleName); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public List<String> get_role_names() throws TException { incrementCounter("get_role_names"); firePreEvent(new PreAuthorizationCallEvent(this)); List<String> ret; try { ret = getMS().listRoleNames(); return ret; } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } @Override public boolean grant_privileges(final PrivilegeBag privileges) throws TException { incrementCounter("grant_privileges"); firePreEvent(new PreAuthorizationCallEvent(this)); Boolean ret; try { ret = getMS().grantPrivileges(privileges); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public boolean revoke_role(final String roleName, final String userName, final PrincipalType principalType) throws TException { return revoke_role(roleName, userName, principalType, false); } private boolean revoke_role(final String roleName, final String userName, final PrincipalType principalType, boolean grantOption) throws TException { incrementCounter("remove_role_member"); firePreEvent(new PreAuthorizationCallEvent(this)); if (PUBLIC.equals(roleName)) { throw new MetaException(PUBLIC + " role can't be revoked."); } Boolean ret; try { RawStore ms = getMS(); Role mRole = ms.getRole(roleName); ret = ms.revokeRole(mRole, userName, principalType, grantOption); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public GrantRevokeRoleResponse grant_revoke_role(GrantRevokeRoleRequest request) throws TException { GrantRevokeRoleResponse response = new GrantRevokeRoleResponse(); boolean grantOption = false; if (request.isSetGrantOption()) { grantOption = request.isGrantOption(); } switch (request.getRequestType()) { case GRANT: { boolean result = grant_role(request.getRoleName(), request.getPrincipalName(), request.getPrincipalType(), request.getGrantor(), request.getGrantorType(), grantOption); response.setSuccess(result); break; } case REVOKE: { boolean result = 
revoke_role(request.getRoleName(), request.getPrincipalName(), request.getPrincipalType(), grantOption); response.setSuccess(result); break; } default: throw new MetaException("Unknown request type " + request.getRequestType()); } return response; } @Override public GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request) throws TException { GrantRevokePrivilegeResponse response = new GrantRevokePrivilegeResponse(); switch (request.getRequestType()) { case GRANT: { boolean result = grant_privileges(request.getPrivileges()); response.setSuccess(result); break; } case REVOKE: { boolean revokeGrantOption = false; if (request.isSetRevokeGrantOption()) { revokeGrantOption = request.isRevokeGrantOption(); } boolean result = revoke_privileges(request.getPrivileges(), revokeGrantOption); response.setSuccess(result); break; } default: throw new MetaException("Unknown request type " + request.getRequestType()); } return response; } @Override public GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef objToRefresh, String authorizer, GrantRevokePrivilegeRequest grantRequest) throws TException { incrementCounter("refresh_privileges"); firePreEvent(new PreAuthorizationCallEvent(this)); GrantRevokePrivilegeResponse response = new GrantRevokePrivilegeResponse(); try { boolean result = getMS().refreshPrivileges(objToRefresh, authorizer, grantRequest.getPrivileges()); response.setSuccess(result); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return response; } @Override public boolean revoke_privileges(final PrivilegeBag privileges) throws TException { return revoke_privileges(privileges, false); } public boolean revoke_privileges(final PrivilegeBag privileges, boolean grantOption) throws TException { incrementCounter("revoke_privileges"); firePreEvent(new PreAuthorizationCallEvent(this)); Boolean ret; try { ret = getMS().revokePrivileges(privileges, grantOption); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } private PrincipalPrivilegeSet get_user_privilege_set(final String userName, final List<String> groupNames) throws TException { incrementCounter("get_user_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getUserPrivilegeSet(userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } @Override public List<HiveObjectPrivilege> list_privileges(String principalName, PrincipalType principalType, HiveObjectRef hiveObject) throws TException { firePreEvent(new PreAuthorizationCallEvent(this)); String catName = hiveObject.isSetCatName() ? 
hiveObject.getCatName() : getDefaultCatalog(conf); if (hiveObject.getObjectType() == null) { return getAllPrivileges(principalName, principalType, catName); } if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) { return list_global_privileges(principalName, principalType); } if (hiveObject.getObjectType() == HiveObjectType.DATABASE) { return list_db_privileges(principalName, principalType, catName, hiveObject .getDbName()); } if (hiveObject.getObjectType() == HiveObjectType.DATACONNECTOR) { return list_dc_privileges(principalName, principalType, hiveObject .getObjectName()); } if (hiveObject.getObjectType() == HiveObjectType.TABLE) { return list_table_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName()); } if (hiveObject.getObjectType() == HiveObjectType.PARTITION) { return list_partition_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject .getPartValues()); } if (hiveObject.getObjectType() == HiveObjectType.COLUMN) { if (hiveObject.getPartValues() == null || hiveObject.getPartValues().isEmpty()) { return list_table_column_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getColumnName()); } return list_partition_column_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject .getPartValues(), hiveObject.getColumnName()); } return null; } private List<HiveObjectPrivilege> getAllPrivileges(String principalName, PrincipalType principalType, String catName) throws TException { List<HiveObjectPrivilege> privs = new ArrayList<>(); privs.addAll(list_global_privileges(principalName, principalType)); privs.addAll(list_db_privileges(principalName, principalType, catName, null)); privs.addAll(list_dc_privileges(principalName, principalType, null)); privs.addAll(list_table_privileges(principalName, principalType, catName, null, null)); privs.addAll(list_partition_privileges(principalName, principalType, catName, null, null, null)); privs.addAll(list_table_column_privileges(principalName, principalType, catName, null, null, null)); privs.addAll(list_partition_column_privileges(principalName, principalType, catName, null, null, null, null)); return privs; } private List<HiveObjectPrivilege> list_table_column_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName, final String columnName) throws TException { incrementCounter("list_table_column_privileges"); try { if (dbName == null) { return getMS().listPrincipalTableColumnGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listTableColumnGrantsAll(catName, dbName, tableName, columnName); } return getMS().listPrincipalTableColumnGrants(principalName, principalType, catName, dbName, tableName, columnName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_partition_column_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName, final List<String> partValues, final String columnName) throws TException { incrementCounter("list_partition_column_privileges"); try { if (dbName == null) { return getMS().listPrincipalPartitionColumnGrantsAll(principalName, principalType); } Table tbl = get_table_core(catName, dbName, tableName); String 
partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); if (principalName == null) { return getMS().listPartitionColumnGrantsAll(catName, dbName, tableName, partName, columnName); } return getMS().listPrincipalPartitionColumnGrants(principalName, principalType, catName, dbName, tableName, partValues, partName, columnName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_db_privileges(final String principalName, final PrincipalType principalType, String catName, final String dbName) throws TException { incrementCounter("list_security_db_grant"); try { if (dbName == null) { return getMS().listPrincipalDBGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listDBGrantsAll(catName, dbName); } else { return getMS().listPrincipalDBGrants(principalName, principalType, catName, dbName); } } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_dc_privileges(final String principalName, final PrincipalType principalType, final String dcName) throws TException { incrementCounter("list_security_dc_grant"); try { if (dcName == null) { return getMS().listPrincipalDCGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listDCGrantsAll(dcName); } else { return getMS().listPrincipalDCGrants(principalName, principalType, dcName); } } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_partition_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName, final List<String> partValues) throws TException { incrementCounter("list_security_partition_grant"); try { if (dbName == null) { return getMS().listPrincipalPartitionGrantsAll(principalName, principalType); } Table tbl = get_table_core(catName, dbName, tableName); String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); if (principalName == null) { return getMS().listPartitionGrantsAll(catName, dbName, tableName, partName); } return getMS().listPrincipalPartitionGrants( principalName, principalType, catName, dbName, tableName, partValues, partName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_table_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName) throws TException { incrementCounter("list_security_table_grant"); try { if (dbName == null) { return getMS().listPrincipalTableGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listTableGrantsAll(catName, dbName, tableName); } return getMS().listAllTableGrants(principalName, principalType, catName, dbName, tableName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_global_privileges( final String principalName, final PrincipalType principalType) throws TException { incrementCounter("list_security_user_grant"); try { if (principalName == null) { return getMS().listGlobalGrantsAll(); } return getMS().listPrincipalGlobalGrants(principalName, principalType); } catch (Exception e) { throw 
handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } @Override public void cancel_delegation_token(String token_str_form) throws TException { startFunction("cancel_delegation_token"); boolean success = false; Exception ex = null; try { HiveMetaStore.cancelDelegationToken(token_str_form); success = true; } catch (Exception e) { ex = e; throw handleException(e).convertIfInstance(IOException.class, MetaException.class).defaultMetaException(); } finally { endFunction("cancel_delegation_token", success, ex); } } @Override public long renew_delegation_token(String token_str_form) throws TException { startFunction("renew_delegation_token"); Long ret = null; Exception ex = null; try { ret = HiveMetaStore.renewDelegationToken(token_str_form); } catch (Exception e) { ex = e; throw handleException(e).convertIfInstance(IOException.class, MetaException.class).defaultMetaException(); } finally { endFunction("renew_delegation_token", ret != null, ex); } return ret; } @Override public String get_delegation_token(String token_owner, String renewer_kerberos_principal_name) throws TException { startFunction("get_delegation_token"); String ret = null; Exception ex = null; try { ret = HiveMetaStore.getDelegationToken(token_owner, renewer_kerberos_principal_name, getIPAddress()); } catch (Exception e) { ex = e; throw handleException(e).convertIfInstance(IOException.class, MetaException.class) .convertIfInstance(InterruptedException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("get_delegation_token", ret != null, ex); } return ret; } @Override public boolean add_token(String token_identifier, String delegation_token) throws TException { startFunction("add_token", ": " + token_identifier); boolean ret = false; Exception ex = null; try { ret = getMS().addToken(token_identifier, delegation_token); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("add_token", ret == true, ex); } return ret; } @Override public boolean remove_token(String token_identifier) throws TException { startFunction("remove_token", ": " + token_identifier); boolean ret = false; Exception ex = null; try { ret = getMS().removeToken(token_identifier); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("remove_token", ret == true, ex); } return ret; } @Override public String get_token(String token_identifier) throws TException { startFunction("get_token for", ": " + token_identifier); String ret = null; Exception ex = null; try { ret = getMS().getToken(token_identifier); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_token", ret != null, ex); } //Thrift cannot return null result return ret == null ? 
"" : ret; } @Override public List<String> get_all_token_identifiers() throws TException { startFunction("get_all_token_identifiers."); List<String> ret; Exception ex = null; try { ret = getMS().getAllTokenIdentifiers(); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_token_identifiers.", ex == null, ex); } return ret; } @Override public int add_master_key(String key) throws TException { startFunction("add_master_key."); int ret; Exception ex = null; try { ret = getMS().addMasterKey(key); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("add_master_key.", ex == null, ex); } return ret; } @Override public void update_master_key(int seq_number, String key) throws TException { startFunction("update_master_key."); Exception ex = null; try { getMS().updateMasterKey(seq_number, key); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("update_master_key.", ex == null, ex); } } @Override public boolean remove_master_key(int key_seq) throws TException { startFunction("remove_master_key."); Exception ex = null; boolean ret; try { ret = getMS().removeMasterKey(key_seq); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("remove_master_key.", ex == null, ex); } return ret; } @Override public List<String> get_master_keys() throws TException { startFunction("get_master_keys."); Exception ex = null; String [] ret = null; try { ret = getMS().getMasterKeys(); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_master_keys.", ret != null, ex); } return Arrays.asList(ret); } @Override public void markPartitionForEvent(final String db_name, final String tbl_name, final Map<String, String> partName, final PartitionEventType evtType) throws TException { Table tbl = null; Exception ex = null; RawStore ms = getMS(); boolean success = false; try { String[] parsedDbName = parseDbName(db_name, conf); ms.openTransaction(); startPartitionFunction("markPartitionForEvent", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName); firePreEvent(new PreLoadPartitionDoneEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName, this)); tbl = ms.markPartitionForEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName, evtType); if (null == tbl) { throw new UnknownTableException("Table: " + tbl_name + " not found."); } if (transactionalListeners.size() > 0) { LoadPartitionDoneEvent lpde = new LoadPartitionDoneEvent(true, tbl, partName, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onLoadPartitionDone(lpde); } } success = ms.commitTransaction(); for (MetaStoreEventListener listener : listeners) { listener.onLoadPartitionDone(new LoadPartitionDoneEvent(true, tbl, partName, this)); } } catch (Exception original) { ex = original; LOG.error("Exception caught in mark partition event ", original); throw handleException(original) .throwIfInstance(UnknownTableException.class, InvalidPartitionException.class, MetaException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } endFunction("markPartitionForEvent", tbl != null, ex, tbl_name); } } @Override public boolean isPartitionMarkedForEvent(final String db_name, final String tbl_name, final Map<String, String> partName, final PartitionEventType evtType) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("isPartitionMarkedForEvent", 
parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName); Boolean ret = null; Exception ex = null; try { ret = getMS().isPartitionMarkedForEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName, evtType); } catch (Exception original) { LOG.error("Exception caught for isPartitionMarkedForEvent ", original); ex = original; throw handleException(original).throwIfInstance(UnknownTableException.class, InvalidPartitionException.class) .throwIfInstance(UnknownPartitionException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("isPartitionMarkedForEvent", ret != null, ex, tbl_name); } return ret; } @Override public List<String> set_ugi(String username, List<String> groupNames) throws TException { Collections.addAll(groupNames, username); return groupNames; } @Override public boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception) throws TException { startFunction("partition_name_has_valid_characters"); boolean ret; Exception ex = null; try { if (throw_exception) { MetaStoreServerUtils.validatePartitionNameCharacters(part_vals, partitionValidationPattern); ret = true; } else { ret = MetaStoreServerUtils.partitionNameHasValidCharacters(part_vals, partitionValidationPattern); } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("partition_name_has_valid_characters", true, ex); } return ret; } private void validateFunctionInfo(Function func) throws InvalidObjectException, MetaException { if (func == null) { throw new MetaException("Function cannot be null."); } if (func.getFunctionName() == null) { throw new MetaException("Function name cannot be null."); } if (func.getDbName() == null) { throw new MetaException("Database name in Function cannot be null."); } if (!MetaStoreUtils.validateName(func.getFunctionName(), null)) { throw new InvalidObjectException(func.getFunctionName() + " is not a valid object name"); } String className = func.getClassName(); if (className == null) { throw new InvalidObjectException("Function class name cannot be null"); } if (func.getOwnerType() == null) { throw new MetaException("Function owner type cannot be null."); } if (func.getFunctionType() == null) { throw new MetaException("Function type cannot be null."); } } @Override public void create_function(Function func) throws TException { validateFunctionInfo(func); boolean success = false; RawStore ms = getMS(); Map<String, String> transactionalListenerResponses = Collections.emptyMap(); try { String catName = func.isSetCatName() ? 
func.getCatName() : getDefaultCatalog(conf);
      if (!func.isSetOwnerName()) {
        try {
          func.setOwnerName(SecurityUtils.getUGI().getShortUserName());
        } catch (Exception ex) {
          LOG.error("Cannot obtain username from the session to create a function", ex);
          throw new TException(ex);
        }
      }
      ms.openTransaction();
      Database db = ms.getDatabase(catName, func.getDbName());
      if (db == null) {
        throw new NoSuchObjectException("The database " + func.getDbName() + " does not exist");
      }
      if (db.getType() == DatabaseType.REMOTE) {
        throw new MetaException("Operation create_function is not supported for REMOTE databases");
      }
      Function existingFunc = ms.getFunction(catName, func.getDbName(), func.getFunctionName());
      if (existingFunc != null) {
        throw new AlreadyExistsException("Function " + func.getFunctionName() + " already exists");
      }
      firePreEvent(new PreCreateFunctionEvent(func, this));
      long time = System.currentTimeMillis() / 1000;
      func.setCreateTime((int) time);
      ms.createFunction(func);
      if (!transactionalListeners.isEmpty()) {
        transactionalListenerResponses =
            MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
                EventType.CREATE_FUNCTION, new CreateFunctionEvent(func, true, this));
      }
      success = ms.commitTransaction();
    } finally {
      if (!success) {
        ms.rollbackTransaction();
      }
      if (!listeners.isEmpty()) {
        MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_FUNCTION,
            new CreateFunctionEvent(func, success, this), null, transactionalListenerResponses, ms);
      }
    }
  }

  @Override
  public void drop_function(String dbName, String funcName)
      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    if (funcName == null) {
      throw new MetaException("Function name cannot be null.");
    }
    boolean success = false;
    Function func = null;
    RawStore ms = getMS();
    Map<String, String> transactionalListenerResponses = Collections.emptyMap();
    String[] parsedDbName = parseDbName(dbName, conf);
    if (parsedDbName[DB_NAME] == null) {
      throw new MetaException("Database name cannot be null.");
    }
    try {
      ms.openTransaction();
      func = ms.getFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName);
      if (func == null) {
        throw new NoSuchObjectException("Function " + funcName + " does not exist");
      }
      Boolean needsCm = ReplChangeManager.isSourceOfReplication(
          get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]));
      // If the copy of the jar to change management fails, we fail the metastore transaction:
      // the user might delete the jars on HDFS externally after dropping the function, so a
      // copy is required for incremental replication to work correctly.
      if (func.getResourceUris() != null && !func.getResourceUris().isEmpty()) {
        for (ResourceUri uri : func.getResourceUris()) {
          if (uri.getUri().toLowerCase().startsWith("hdfs:") && needsCm) {
            wh.addToChangeManagement(new Path(uri.getUri()));
          }
        }
      }
      firePreEvent(new PreDropFunctionEvent(func, this));
      // If the operation on the metastore fails, we don't undo anything in change management,
      // but we do fail the metastore transaction; a stray copy of the jar in change management
      // causes no harm, and the cleaner thread will remove it when the jar expires.
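      // Hedged illustration (hypothetical values): the Function objects that create_function
      // validates above and drop_function removes here are plain Thrift structs. A minimal
      // one that passes validateFunctionInfo could look like:
      //
      //   Function fn = new Function("my_lower", "default", "com.example.udf.MyLower",
      //       "alice", PrincipalType.USER, (int) (System.currentTimeMillis() / 1000),
      //       FunctionType.JAVA,
      //       Collections.singletonList(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udfs.jar")));
      //   handler.create_function(fn);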
ms.dropFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName);
      if (transactionalListeners.size() > 0) {
        transactionalListenerResponses =
            MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_FUNCTION,
                new DropFunctionEvent(func, true, this));
      }
      success = ms.commitTransaction();
    } finally {
      if (!success) {
        ms.rollbackTransaction();
      }
      if (listeners.size() > 0) {
        MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_FUNCTION,
            new DropFunctionEvent(func, success, this), null, transactionalListenerResponses, ms);
      }
    }
  }

  @Override
  public void alter_function(String dbName, String funcName, Function newFunc) throws TException {
    String[] parsedDbName = parseDbName(dbName, conf);
    validateForAlterFunction(parsedDbName[DB_NAME], funcName, newFunc);
    boolean success = false;
    RawStore ms = getMS();
    try {
      firePreEvent(new PreCreateFunctionEvent(newFunc, this));
      ms.openTransaction();
      ms.alterFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName, newFunc);
      success = ms.commitTransaction();
    } catch (InvalidObjectException e) {
      // Throwing MetaException instead of InvalidObjectException, as InvalidObjectException
      // is not declared for the alter_function method in the Thrift interface.
      throwMetaException(e);
    } finally {
      if (!success) {
        ms.rollbackTransaction();
      }
    }
  }

  private void validateForAlterFunction(String dbName, String funcName, Function newFunc)
      throws MetaException {
    if (dbName == null || funcName == null) {
      throw new MetaException("Database and function name cannot be null.");
    }
    try {
      validateFunctionInfo(newFunc);
    } catch (InvalidObjectException e) {
      // validateFunctionInfo is shared by the create and alter function paths and can throw
      // InvalidObjectException. Since InvalidObjectException is not declared for alter_function
      // in the Thrift interface, it would surface as a TApplicationException on the caller side;
      // re-throwing it as MetaException avoids that.
      throw newMetaException(e);
    }
  }

  @Override
  public List<String> get_functions(String dbName, String pattern) throws MetaException {
    startFunction("get_functions", ": db=" + dbName + " pat=" + pattern);
    RawStore ms = getMS();
    Exception ex = null;
    List<String> funcNames = null;
    String[] parsedDbName = parseDbName(dbName, conf);
    try {
      funcNames = ms.getFunctions(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern);
    } catch (Exception e) {
      ex = e;
      throw newMetaException(e);
    } finally {
      endFunction("get_functions", funcNames != null, ex);
    }
    return funcNames;
  }

  @Override
  public GetAllFunctionsResponse get_all_functions() throws MetaException {
    GetAllFunctionsResponse response = new GetAllFunctionsResponse();
    startFunction("get_all_functions");
    RawStore ms = getMS();
    List<Function> allFunctions = null;
    Exception ex = null;
    try {
      // Leaving this as the 'hive' catalog (rather than choosing the default from the
      // configuration) because all the default UDFs are in that catalog, and that is what
      // people would really want here.
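      // Hedged note (illustrative, hypothetical values): the dbName accepted by the
      // surrounding function endpoints may carry a catalog prefix that parseDbName(...)
      // splits apart, which is why most methods here index the result with CAT_NAME/DB_NAME:
      //
      //   String[] parts = parseDbName("@spark#analytics", conf);
      //   // parts[CAT_NAME] -> "spark", parts[DB_NAME] -> "analytics"
      //   String[] plain = parseDbName("analytics", conf);
      //   // plain[CAT_NAME] -> default catalog from conf, plain[DB_NAME] -> "analytics"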
allFunctions = ms.getAllFunctions(DEFAULT_CATALOG_NAME); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_functions", allFunctions != null, ex); } response.setFunctions(allFunctions); return response; } @Override public Function get_function(String dbName, String funcName) throws TException { if (dbName == null || funcName == null) { throw new MetaException("Database and function name cannot be null."); } startFunction("get_function", ": " + dbName + "." + funcName); RawStore ms = getMS(); Function func = null; Exception ex = null; String[] parsedDbName = parseDbName(dbName, conf); try { func = ms.getFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName); if (func == null) { throw new NoSuchObjectException( "Function " + dbName + "." + funcName + " does not exist"); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(NoSuchObjectException.class).defaultMetaException(); } finally { endFunction("get_function", func != null, ex); } return func; } // Transaction and locking methods @Override public GetOpenTxnsResponse get_open_txns() throws TException { return getTxnHandler().getOpenTxns(); } @Override public GetOpenTxnsResponse get_open_txns_req(GetOpenTxnsRequest getOpenTxnsRequest) throws TException { return getTxnHandler().getOpenTxns(getOpenTxnsRequest.getExcludeTxnTypes()); } // Transaction and locking methods @Override public GetOpenTxnsInfoResponse get_open_txns_info() throws TException { return getTxnHandler().getOpenTxnsInfo(); } @Override public OpenTxnsResponse open_txns(OpenTxnRequest rqst) throws TException { OpenTxnsResponse response = getTxnHandler().openTxns(rqst); List<Long> txnIds = response.getTxn_ids(); boolean isHiveReplTxn = rqst.isSetReplPolicy() && TxnType.DEFAULT.equals(rqst.getTxn_type()); if (txnIds != null && listeners != null && !listeners.isEmpty() && !isHiveReplTxn) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.OPEN_TXN, new OpenTxnEvent(txnIds, this)); } return response; } @Override public void abort_txn(AbortTxnRequest rqst) throws TException { getTxnHandler().abortTxn(rqst); boolean isHiveReplTxn = rqst.isSetReplPolicy() && TxnType.DEFAULT.equals(rqst.getTxn_type()); if (listeners != null && !listeners.isEmpty() && !isHiveReplTxn) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ABORT_TXN, new AbortTxnEvent(rqst.getTxnid(), this)); } } @Override public void abort_txns(AbortTxnsRequest rqst) throws TException { getTxnHandler().abortTxns(rqst); if (listeners != null && !listeners.isEmpty()) { for (Long txnId : rqst.getTxn_ids()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ABORT_TXN, new AbortTxnEvent(txnId, this)); } } } @Override public long get_latest_txnid_in_conflict(long txnId) throws MetaException { return getTxnHandler().getLatestTxnIdInConflict(txnId); } @Override public void commit_txn(CommitTxnRequest rqst) throws TException { boolean isReplayedReplTxn = TxnType.REPL_CREATED.equals(rqst.getTxn_type()); boolean isHiveReplTxn = rqst.isSetReplPolicy() && TxnType.DEFAULT.equals(rqst.getTxn_type()); // in replication flow, the write notification log table will be updated here. 
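    // Hedged, caller-side sketch (hypothetical client variable): the transaction endpoints
    // above are typically driven in open -> work -> commit order, with commit_txn below
    // firing COMMIT_TXN (and possibly COMMIT_COMPACTION) listener events:
    //
    //   OpenTxnRequest open = new OpenTxnRequest(1, "alice", "host1");  // num_txns, user, host
    //   long txnId = handler.open_txns(open).getTxn_ids().get(0);
    //   // ... lock(), allocate_table_write_ids(), write data ...
    //   handler.commit_txn(new CommitTxnRequest(txnId));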
if (rqst.isSetWriteEventInfos() && isReplayedReplTxn) { assert (rqst.isSetReplPolicy()); long targetTxnId = getTxnHandler().getTargetTxnId(rqst.getReplPolicy(), rqst.getTxnid()); if (targetTxnId < 0) { //looks like a retry return; } for (WriteEventInfo writeEventInfo : rqst.getWriteEventInfos()) { String[] filesAdded = ReplChangeManager.getListFromSeparatedString(writeEventInfo.getFiles()); List<String> partitionValue = null; Partition ptnObj = null; String root; Table tbl = getTblObject(writeEventInfo.getDatabase(), writeEventInfo.getTable(), null); if (writeEventInfo.getPartition() != null && !writeEventInfo.getPartition().isEmpty()) { partitionValue = Warehouse.getPartValuesFromPartName(writeEventInfo.getPartition()); ptnObj = getPartitionObj(writeEventInfo.getDatabase(), writeEventInfo.getTable(), partitionValue, tbl); root = ptnObj.getSd().getLocation(); } else { root = tbl.getSd().getLocation(); } InsertEventRequestData insertData = new InsertEventRequestData(); insertData.setReplace(true); // The files in the commit txn message during load will have files with path corresponding to source // warehouse. Need to transform them to target warehouse using table or partition object location. for (String file : filesAdded) { String[] decodedPath = ReplChangeManager.decodeFileUri(file); String name = (new Path(decodedPath[0])).getName(); Path newPath = FileUtils.getTransformedPath(name, decodedPath[3], root); insertData.addToFilesAdded(newPath.toUri().toString()); insertData.addToSubDirectoryList(decodedPath[3]); try { insertData.addToFilesAddedChecksum(ReplChangeManager.checksumFor(newPath, newPath.getFileSystem(conf))); } catch (IOException e) { LOG.error("failed to get checksum for the file " + newPath + " with error: " + e.getMessage()); throw new TException(e.getMessage()); } } WriteNotificationLogRequest wnRqst = new WriteNotificationLogRequest(targetTxnId, writeEventInfo.getWriteId(), writeEventInfo.getDatabase(), writeEventInfo.getTable(), insertData); if (partitionValue != null) { wnRqst.setPartitionVals(partitionValue); } addTxnWriteNotificationLog(tbl, ptnObj, wnRqst); } } getTxnHandler().commitTxn(rqst); if (listeners != null && !listeners.isEmpty() && !isHiveReplTxn) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.COMMIT_TXN, new CommitTxnEvent(rqst.getTxnid(), this)); Optional<CompactionInfo> compactionInfo = getTxnHandler().getCompactionByTxnId(rqst.getTxnid()); if (compactionInfo.isPresent()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.COMMIT_COMPACTION, new CommitCompactionEvent(rqst.getTxnid(), compactionInfo.get(), this)); } } } @Override public void repl_tbl_writeid_state(ReplTblWriteIdStateRequest rqst) throws TException { getTxnHandler().replTableWriteIdState(rqst); } @Override public GetValidWriteIdsResponse get_valid_write_ids(GetValidWriteIdsRequest rqst) throws TException { return getTxnHandler().getValidWriteIds(rqst); } @Override public void set_hadoop_jobid(String jobId, long cqId) { getTxnHandler().setHadoopJobId(jobId, cqId); } @Deprecated @Override public OptionalCompactionInfoStruct find_next_compact(String workerId) throws MetaException{ return CompactionInfo.compactionInfoToOptionalStruct( getTxnHandler().findNextToCompact(workerId)); } @Override public OptionalCompactionInfoStruct find_next_compact2(FindNextCompactRequest rqst) throws MetaException{ return CompactionInfo.compactionInfoToOptionalStruct( getTxnHandler().findNextToCompact(rqst)); } @Override public void mark_cleaned(CompactionInfoStruct cr) throws MetaException 
{ getTxnHandler().markCleaned(CompactionInfo.compactionStructToInfo(cr)); } @Override public void mark_compacted(CompactionInfoStruct cr) throws MetaException { getTxnHandler().markCompacted(CompactionInfo.compactionStructToInfo(cr)); } @Override public void mark_failed(CompactionInfoStruct cr) throws MetaException { getTxnHandler().markFailed(CompactionInfo.compactionStructToInfo(cr)); } @Override public List<String> find_columns_with_stats(CompactionInfoStruct cr) throws MetaException { return getTxnHandler().findColumnsWithStats(CompactionInfo.compactionStructToInfo(cr)); } @Override public void update_compactor_state(CompactionInfoStruct cr, long highWaterMark) throws MetaException { getTxnHandler().updateCompactorState( CompactionInfo.compactionStructToInfo(cr), highWaterMark); } @Override public GetLatestCommittedCompactionInfoResponse get_latest_committed_compaction_info( GetLatestCommittedCompactionInfoRequest rqst) throws MetaException { if (rqst.getDbname() == null || rqst.getTablename() == null) { throw new MetaException("Database name and table name cannot be null."); } GetLatestCommittedCompactionInfoResponse response = getTxnHandler().getLatestCommittedCompactionInfo(rqst); return FilterUtils.filterCommittedCompactionInfoStructIfEnabled(isServerFilterEnabled, filterHook, getDefaultCatalog(conf), rqst.getDbname(), rqst.getTablename(), response); } @Override public AllocateTableWriteIdsResponse allocate_table_write_ids( AllocateTableWriteIdsRequest rqst) throws TException { AllocateTableWriteIdsResponse response = getTxnHandler().allocateTableWriteIds(rqst); if (listeners != null && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALLOC_WRITE_ID, new AllocWriteIdEvent(response.getTxnToWriteIds(), rqst.getDbName(), rqst.getTableName(), this)); } return response; } @Override public MaxAllocatedTableWriteIdResponse get_max_allocated_table_write_id(MaxAllocatedTableWriteIdRequest rqst) throws MetaException { return getTxnHandler().getMaxAllocatedTableWrited(rqst); } @Override public void seed_write_id(SeedTableWriteIdsRequest rqst) throws MetaException { getTxnHandler().seedWriteId(rqst); } @Override public void seed_txn_id(SeedTxnIdRequest rqst) throws MetaException { getTxnHandler().seedTxnId(rqst); } private void addTxnWriteNotificationLog(Table tableObj, Partition ptnObj, WriteNotificationLogRequest rqst) throws MetaException { String partition = ""; //Empty string is an invalid partition name. Can be used for non partitioned table. 
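    // Hedged illustration of the Warehouse.makePartName call used just below: for
    // partition keys [ds, country] and values ["2021-01-01", "US"] it yields
    // "ds=2021-01-01/country=US". The empty string above is deliberately impossible as
    // a partition name, so it safely marks the non-partitioned case.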
if (ptnObj != null) { partition = Warehouse.makePartName(tableObj.getPartitionKeys(), rqst.getPartitionVals()); } AcidWriteEvent event = new AcidWriteEvent(partition, tableObj, ptnObj, rqst); getTxnHandler().addWriteNotificationLog(event); if (listeners != null && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ACID_WRITE, event); } } private Table getTblObject(String db, String table, String catalog) throws MetaException, NoSuchObjectException { GetTableRequest req = new GetTableRequest(db, table); if (catalog != null) { req.setCatName(catalog); } req.setCapabilities(new ClientCapabilities(Lists.newArrayList(ClientCapability.TEST_CAPABILITY, ClientCapability.INSERT_ONLY_TABLES))); return get_table_req(req).getTable(); } private Partition getPartitionObj(String db, String table, List<String> partitionVals, Table tableObj) throws MetaException, NoSuchObjectException { if (tableObj.isSetPartitionKeys() && !tableObj.getPartitionKeys().isEmpty()) { return get_partition(db, table, partitionVals); } return null; } @Override public WriteNotificationLogResponse add_write_notification_log(WriteNotificationLogRequest rqst) throws TException { Table tableObj = getTblObject(rqst.getDb(), rqst.getTable(), null); Partition ptnObj = getPartitionObj(rqst.getDb(), rqst.getTable(), rqst.getPartitionVals(), tableObj); addTxnWriteNotificationLog(tableObj, ptnObj, rqst); return new WriteNotificationLogResponse(); } @Override public WriteNotificationLogBatchResponse add_write_notification_log_in_batch( WriteNotificationLogBatchRequest batchRequest) throws TException { if (batchRequest.getRequestList().size() == 0) { return new WriteNotificationLogBatchResponse(); } Table tableObj = getTblObject(batchRequest.getDb(), batchRequest.getTable(), batchRequest.getCatalog()); BatchAcidWriteEvent event = new BatchAcidWriteEvent(); List<String> partNameList = new ArrayList<>(); List<Partition> ptnObjList; Map<String, WriteNotificationLogRequest> rqstMap = new HashMap<>(); if (tableObj.getPartitionKeys().size() != 0) { // partitioned table for (WriteNotificationLogRequest rqst : batchRequest.getRequestList()) { String partition = Warehouse.makePartName(tableObj.getPartitionKeys(), rqst.getPartitionVals()); partNameList.add(partition); // This is used to ignore those request for which the partition does not exists. rqstMap.put(partition, rqst); } ptnObjList = getMS().getPartitionsByNames(tableObj.getCatName(), tableObj.getDbName(), tableObj.getTableName(), partNameList); } else { ptnObjList = new ArrayList<>(); for (WriteNotificationLogRequest ignored : batchRequest.getRequestList()) { ptnObjList.add(null); } } int idx = 0; for (Partition partObject : ptnObjList) { String partition = ""; //Empty string is an invalid partition name. Can be used for non partitioned table. WriteNotificationLogRequest request; if (partObject != null) { partition = Warehouse.makePartName(tableObj.getPartitionKeys(), partObject.getValues()); request = rqstMap.get(partition); } else { // for non partitioned table, we can get serially from the list. 
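        // Hedged, caller-side sketch (hypothetical values; setters used rather than relying
        // on constructor argument order) of the batch API this method implements:
        //
        //   WriteNotificationLogBatchRequest batch = new WriteNotificationLogBatchRequest();
        //   batch.setCatalog("hive"); batch.setDb("default"); batch.setTable("sales");
        //   batch.addToRequestList(new WriteNotificationLogRequest(txnId, writeId,
        //       "default", "sales", insertData));  // one entry per partition written
        //   handler.add_write_notification_log_in_batch(batch);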
request = batchRequest.getRequestList().get(idx++); } event.addNotification(partition, tableObj, partObject, request); if (listeners != null && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.BATCH_ACID_WRITE, new BatchAcidWriteEvent(partition, tableObj, partObject, request)); } } getTxnHandler().addWriteNotificationLog(event); return new WriteNotificationLogBatchResponse(); } @Override public LockResponse lock(LockRequest rqst) throws TException { return getTxnHandler().lock(rqst); } @Override public LockResponse check_lock(CheckLockRequest rqst) throws TException { return getTxnHandler().checkLock(rqst); } @Override public void unlock(UnlockRequest rqst) throws TException { getTxnHandler().unlock(rqst); } @Override public ShowLocksResponse show_locks(ShowLocksRequest rqst) throws TException { return getTxnHandler().showLocks(rqst); } @Override public void heartbeat(HeartbeatRequest ids) throws TException { getTxnHandler().heartbeat(ids); } @Override public HeartbeatTxnRangeResponse heartbeat_txn_range(HeartbeatTxnRangeRequest rqst) throws TException { return getTxnHandler().heartbeatTxnRange(rqst); } @Deprecated @Override public void compact(CompactionRequest rqst) throws TException { compact2(rqst); } @Override public CompactionResponse compact2(CompactionRequest rqst) throws TException { return getTxnHandler().compact(rqst); } @Override public ShowCompactResponse show_compact(ShowCompactRequest rqst) throws TException { ShowCompactResponse response = getTxnHandler().showCompact(rqst); response.setCompacts(FilterUtils.filterCompactionsIfEnabled(isServerFilterEnabled, filterHook, getDefaultCatalog(conf), response.getCompacts())); return response; } @Override public void flushCache() throws TException { getMS().flushCache(); } @Override public void add_dynamic_partitions(AddDynamicPartitions rqst) throws TException { getTxnHandler().addDynamicPartitions(rqst); } @Override public GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request) throws TException { incrementCounter("get_principals_in_role"); firePreEvent(new PreAuthorizationCallEvent(this)); Exception ex = null; GetPrincipalsInRoleResponse response = null; try { response = new GetPrincipalsInRoleResponse(getMS().listRoleMembers(request.getRoleName())); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_principals_in_role", ex == null, ex); } return response; } @Override public GetRoleGrantsForPrincipalResponse get_role_grants_for_principal( GetRoleGrantsForPrincipalRequest request) throws TException { incrementCounter("get_role_grants_for_principal"); firePreEvent(new PreAuthorizationCallEvent(this)); Exception ex = null; List<RolePrincipalGrant> roleMaps = null; try { roleMaps = getMS().listRolesWithGrants(request.getPrincipal_name(), request.getPrincipal_type()); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_role_grants_for_principal", ex == null, ex); } //List<RolePrincipalGrant> roleGrantsList = getRolePrincipalGrants(roleMaps); return new GetRoleGrantsForPrincipalResponse(roleMaps); } @Override public AggrStats get_aggr_stats_for(PartitionsStatsRequest request) throws TException { String catName = request.isSetCatName() ? 
        request.getCatName().toLowerCase() : getDefaultCatalog(conf);
    String dbName = request.getDbName().toLowerCase();
    String tblName = request.getTblName().toLowerCase();
    startFunction("get_aggr_stats_for", ": table=" + TableName.getQualified(catName, dbName, tblName));
    List<String> lowerCaseColNames = new ArrayList<>(request.getColNames().size());
    for (String colName : request.getColNames()) {
      lowerCaseColNames.add(colName.toLowerCase());
    }
    List<String> lowerCasePartNames = new ArrayList<>(request.getPartNames().size());
    for (String partName : request.getPartNames()) {
      lowerCasePartNames.add(lowerCaseConvertPartName(partName));
    }
    AggrStats aggrStats = null;
    try {
      aggrStats = getMS().get_aggr_stats_for(catName, dbName, tblName,
          lowerCasePartNames, lowerCaseColNames, request.getEngine(), request.getValidWriteIdList());
      return aggrStats;
    } finally {
      endFunction("get_aggr_stats_for", aggrStats != null, null, request.getTblName());
    }
  }

  @Override
  public boolean set_aggr_stats_for(SetPartitionsStatsRequest request) throws TException {
    boolean ret = true;
    List<ColumnStatistics> csNews = request.getColStats();
    if (csNews == null || csNews.isEmpty()) {
      return ret;
    }
    // figure out if it is table level or partition level
    ColumnStatistics firstColStats = csNews.get(0);
    ColumnStatisticsDesc statsDesc = firstColStats.getStatsDesc();
    String catName = statsDesc.isSetCatName() ? statsDesc.getCatName() : getDefaultCatalog(conf);
    String dbName = statsDesc.getDbName();
    String tableName = statsDesc.getTableName();
    List<String> colNames = new ArrayList<>();
    for (ColumnStatisticsObj obj : firstColStats.getStatsObj()) {
      colNames.add(obj.getColName());
    }
    if (statsDesc.isIsTblLevel()) {
      // there should be only one ColumnStatistics
      if (request.getColStatsSize() != 1) {
        throw new MetaException(
            "Expecting only 1 ColumnStatistics for table's column stats, but found " + request.getColStatsSize());
      }
      if (request.isSetNeedMerge() && request.isNeedMerge()) {
        return updateTableColumnStatsWithMerge(catName, dbName, tableName, colNames, request);
      } else {
        // This is the overwrite case; we do not care about the accuracy.
        return updateTableColumnStatsInternal(firstColStats, request.getValidWriteIdList(), request.getWriteId());
      }
    } else {
      // partition level column stats merging
      // note that we may have two or more duplicate partition names.
      // see autoColumnStats_2.q under TestMiniLlapLocalCliDriver
      Map<String, ColumnStatistics> newStatsMap = new HashMap<>();
      for (ColumnStatistics csNew : csNews) {
        String partName = csNew.getStatsDesc().getPartName();
        if (newStatsMap.containsKey(partName)) {
          MetaStoreServerUtils.mergeColStats(csNew, newStatsMap.get(partName));
        }
        newStatsMap.put(partName, csNew);
      }
      if (request.isSetNeedMerge() && request.isNeedMerge()) {
        ret = updatePartColumnStatsWithMerge(catName, dbName, tableName, colNames, newStatsMap, request);
      } else { // No merge.
        Table t = getTable(catName, dbName, tableName);
        // We don't short-circuit on errors here anymore. That can leave acid stats invalid.
        if (MetastoreConf.getBoolVar(getConf(), ConfVars.TRY_DIRECT_SQL)) {
          ret = updatePartitionColStatsInBatch(t, newStatsMap, request.getValidWriteIdList(), request.getWriteId());
        } else {
          for (Map.Entry<String, ColumnStatistics> entry : newStatsMap.entrySet()) {
            // We don't short-circuit on errors here anymore. That can leave acid stats invalid.
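            // Fold each per-partition result into ret with a logical AND: a single failed
            // update flips the overall return value to false without aborting the remaining updates.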
            ret = updatePartitonColStatsInternal(t, entry.getValue(),
                request.getValidWriteIdList(), request.getWriteId()) && ret;
          }
        }
      }
    }
    return ret;
  }

  private boolean updatePartColumnStatsWithMerge(String catName, String dbName, String tableName,
      List<String> colNames, Map<String, ColumnStatistics> newStatsMap, SetPartitionsStatsRequest request)
          throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException {
    RawStore ms = getMS();
    ms.openTransaction();
    boolean isCommitted = false, result = false;
    try {
      // a single call to get all column stats for all partitions
      List<String> partitionNames = new ArrayList<>();
      partitionNames.addAll(newStatsMap.keySet());
      List<ColumnStatistics> csOlds = ms.getPartitionColumnStatistics(catName, dbName, tableName,
          partitionNames, colNames, request.getEngine(), request.getValidWriteIdList());
      if (newStatsMap.values().size() != csOlds.size()) {
        // some of the partitions are missing stats.
        LOG.debug("Some of the partitions are missing stats.");
      }
      Map<String, ColumnStatistics> oldStatsMap = new HashMap<>();
      for (ColumnStatistics csOld : csOlds) {
        oldStatsMap.put(csOld.getStatsDesc().getPartName(), csOld);
      }
      // another single call to get all the partition objects
      List<Partition> partitions = ms.getPartitionsByNames(catName, dbName, tableName, partitionNames);
      Map<String, Partition> mapToPart = new HashMap<>();
      for (int index = 0; index < partitionNames.size(); index++) {
        mapToPart.put(partitionNames.get(index), partitions.get(index));
      }
      Table t = getTable(catName, dbName, tableName);
      Map<String, ColumnStatistics> statsMap = new HashMap<>();
      boolean useDirectSql = MetastoreConf.getBoolVar(getConf(), ConfVars.TRY_DIRECT_SQL);
      for (Map.Entry<String, ColumnStatistics> entry : newStatsMap.entrySet()) {
        ColumnStatistics csNew = entry.getValue();
        ColumnStatistics csOld = oldStatsMap.get(entry.getKey());
        boolean isInvalidTxnStats = csOld != null
            && csOld.isSetIsStatsCompliant() && !csOld.isIsStatsCompliant();
        Partition part = mapToPart.get(entry.getKey());
        if (isInvalidTxnStats) {
          // No columns can be merged; a shortcut for getMergableCols.
          csNew.setStatsObj(Lists.newArrayList());
        } else {
          // we first use getParameters() to prune the stats
          MetaStoreServerUtils.getMergableCols(csNew, part.getParameters());
          // we merge those that can be merged
          if (csOld != null && csOld.getStatsObjSize() != 0 && !csNew.getStatsObj().isEmpty()) {
            MetaStoreServerUtils.mergeColStats(csNew, csOld);
          }
        }
        if (!csNew.getStatsObj().isEmpty()) {
          // We don't short-circuit on errors here anymore. That can leave acid stats invalid.
          if (useDirectSql) {
            statsMap.put(csNew.getStatsDesc().getPartName(), csNew);
          } else {
            result = updatePartitonColStatsInternal(t, csNew,
                request.getValidWriteIdList(), request.getWriteId()) && result;
          }
        } else if (isInvalidTxnStats) {
          // For now because the stats state is such as it is, we will invalidate everything.
          // Overall the semantics here are not clear - we could invalidate only some columns, but does
          // that make any physical sense? Could a query affect some columns but not others?
          part.setWriteId(request.getWriteId());
          StatsSetupConst.clearColumnStatsState(part.getParameters());
          StatsSetupConst.setBasicStatsState(part.getParameters(), StatsSetupConst.FALSE);
          ms.alterPartition(catName, dbName, tableName, part.getValues(), part, request.getValidWriteIdList());
          result = false;
        } else {
          // TODO: why doesn't the original call for non acid tables invalidate the stats?
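          // Nothing mergeable survived the pruning above and the existing stats are still
          // transactionally valid, so this partition is left untouched and only logged.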
          LOG.debug("All the column stats " + csNew.getStatsDesc().getPartName()
              + " are not accurate enough to merge.");
        }
      }
      ms.commitTransaction();
      isCommitted = true;
      // updatePartitionColStatsInBatch starts/commits a transaction internally. As no write or select-for-update
      // operation is done in this transaction, it is safe to commit it before calling updatePartitionColStatsInBatch.
      if (!statsMap.isEmpty()) {
        updatePartitionColStatsInBatch(t, statsMap, request.getValidWriteIdList(), request.getWriteId());
      }
    } finally {
      if (!isCommitted) {
        ms.rollbackTransaction();
      }
    }
    return result;
  }

  private boolean updateTableColumnStatsWithMerge(String catName, String dbName, String tableName,
      List<String> colNames, SetPartitionsStatsRequest request)
          throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException {
    ColumnStatistics firstColStats = request.getColStats().get(0);
    RawStore ms = getMS();
    ms.openTransaction();
    boolean isCommitted = false, result = false;
    try {
      ColumnStatistics csOld = ms.getTableColumnStatistics(catName, dbName, tableName, colNames,
          request.getEngine(), request.getValidWriteIdList());
      // we first use the valid stats list to prune the stats
      boolean isInvalidTxnStats = csOld != null
          && csOld.isSetIsStatsCompliant() && !csOld.isIsStatsCompliant();
      if (isInvalidTxnStats) {
        // No columns can be merged; a shortcut for getMergableCols.
        firstColStats.setStatsObj(Lists.newArrayList());
      } else {
        Table t = getTable(catName, dbName, tableName);
        MetaStoreServerUtils.getMergableCols(firstColStats, t.getParameters());
        // we merge those that can be merged
        if (csOld != null && csOld.getStatsObjSize() != 0 && !firstColStats.getStatsObj().isEmpty()) {
          MetaStoreServerUtils.mergeColStats(firstColStats, csOld);
        }
      }
      if (!firstColStats.getStatsObj().isEmpty()) {
        result = updateTableColumnStatsInternal(firstColStats, request.getValidWriteIdList(), request.getWriteId());
      } else if (isInvalidTxnStats) {
        // For now because the stats state is such as it is, we will invalidate everything.
        // Overall the semantics here are not clear - we could invalidate only some columns, but does
        // that make any physical sense? Could a query affect some columns but not others?
        Table t = getTable(catName, dbName, tableName);
        t.setWriteId(request.getWriteId());
        StatsSetupConst.clearColumnStatsState(t.getParameters());
        StatsSetupConst.setBasicStatsState(t.getParameters(), StatsSetupConst.FALSE);
        ms.alterTable(catName, dbName, tableName, t, request.getValidWriteIdList());
      } else {
        // TODO: why doesn't the original call for non acid tables invalidate the stats?
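        // Table-level analogue of the partition case above: no mergeable columns and the
        // old stats are still valid, so the call is treated as a successful no-op.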
        LOG.debug("All the column stats are not accurate enough to merge.");
        result = true;
      }
      ms.commitTransaction();
      isCommitted = true;
    } finally {
      if (!isCommitted) {
        ms.rollbackTransaction();
      }
    }
    return result;
  }

  private Table getTable(String catName, String dbName, String tableName)
      throws MetaException, InvalidObjectException {
    return getTable(catName, dbName, tableName, null);
  }

  private Table getTable(String catName, String dbName, String tableName, String writeIdList)
      throws MetaException, InvalidObjectException {
    Table t = getMS().getTable(catName, dbName, tableName, writeIdList);
    if (t == null) {
      throw new InvalidObjectException(TableName.getQualified(catName, dbName, tableName)
          + " table not found");
    }
    return t;
  }

  @Override
  public NotificationEventResponse get_next_notification(NotificationEventRequest rqst) throws TException {
    authorizeProxyPrivilege();
    RawStore ms = getMS();
    return ms.getNextNotification(rqst);
  }

  @Override
  public CurrentNotificationEventId get_current_notificationEventId() throws TException {
    authorizeProxyPrivilege();
    RawStore ms = getMS();
    return ms.getCurrentNotificationEventId();
  }

  @Override
  public NotificationEventsCountResponse get_notification_events_count(NotificationEventsCountRequest rqst)
      throws TException {
    authorizeProxyPrivilege();
    RawStore ms = getMS();
    return ms.getNotificationEventsCount(rqst);
  }

  private void authorizeProxyPrivilege() throws TException {
    // Skip the auth in embedded mode or if the auth is disabled
    if (!HiveMetaStore.isMetaStoreRemote()
        || !MetastoreConf.getBoolVar(conf, ConfVars.EVENT_DB_NOTIFICATION_API_AUTH)) {
      return;
    }
    String user = null;
    try {
      user = SecurityUtils.getUGI().getShortUserName();
    } catch (Exception ex) {
      LOG.error("Cannot obtain username", ex);
      throw new TException(ex);
    }
    if (!MetaStoreServerUtils.checkUserHasHostProxyPrivileges(user, conf, getIPAddress())) {
      LOG.error("Not authorized to make the get_notification_events_count call. You can try to disable "
          + ConfVars.EVENT_DB_NOTIFICATION_API_AUTH.toString());
      throw new TException("User " + user + " is not allowed to perform this API call");
    }
  }

  @Override
  public FireEventResponse fire_listener_event(FireEventRequest rqst) throws TException {
    switch (rqst.getData().getSetField()) {
    case INSERT_DATA:
    case INSERT_DATAS:
      String catName = rqst.isSetCatName() ? rqst.getCatName() : getDefaultCatalog(conf);
      String dbName = rqst.getDbName();
      String tblName = rqst.getTableName();
      boolean isSuccessful = rqst.isSuccessful();
      List<InsertEvent> events = new ArrayList<>();
      if (rqst.getData().isSetInsertData()) {
        events.add(new InsertEvent(catName, dbName, tblName, rqst.getPartitionVals(),
            rqst.getData().getInsertData(), isSuccessful, this));
      } else {
        // this is a bulk fire insert event operation
        // we use the partition values field from the InsertEventRequestData object
        // instead of the FireEventRequest object
        for (InsertEventRequestData insertData : rqst.getData().getInsertDatas()) {
          if (!insertData.isSetPartitionVal()) {
            throw new MetaException(
                "Partition values must be set when firing multiple insert events");
          }
          events.add(new InsertEvent(catName, dbName, tblName, insertData.getPartitionVal(),
              insertData, isSuccessful, this));
        }
      }
      FireEventResponse response = new FireEventResponse();
      for (InsertEvent event : events) {
        /*
         * The transactional listener response will be set already on the event, so there is no need
         * to pass the response to the non-transactional listener.
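         * (The transactional listeners run inside the metastore transaction and typically stamp the
         * generated notification event id onto the event's parameters; the
         * DB_NOTIFICATION_EVENT_ID_KEY_NAME check below relies on exactly that.)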
         */
        MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.INSERT, event);
        MetaStoreListenerNotifier.notifyEvent(listeners, EventType.INSERT, event);
        if (event.getParameters() != null && event.getParameters()
            .containsKey(MetaStoreEventListenerConstants.DB_NOTIFICATION_EVENT_ID_KEY_NAME)) {
          response.addToEventIds(Long.valueOf(event.getParameters()
              .get(MetaStoreEventListenerConstants.DB_NOTIFICATION_EVENT_ID_KEY_NAME)));
        } else {
          String msg = "Insert event id not generated for ";
          if (event.getPartitionObj() != null) {
            msg += "partition " + Arrays.toString(event.getPartitionObj().getValues().toArray()) + " of ";
          }
          msg += "table " + event.getTableObj().getDbName() + "." + event.getTableObj().getTableName();
          LOG.warn(msg);
        }
      }
      return response;
    default:
      throw new TException("Event type " + rqst.getData().getSetField().toString()
          + " not currently supported.");
    }
  }

  @Override
  public GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest req)
      throws TException {
    GetFileMetadataByExprResult result = new GetFileMetadataByExprResult();
    RawStore ms = getMS();
    if (!ms.isFileMetadataSupported()) {
      result.setIsSupported(false);
      result.setMetadata(Collections.emptyMap()); // Set the required field.
      return result;
    }
    result.setIsSupported(true);
    List<Long> fileIds = req.getFileIds();
    boolean needMetadata = !req.isSetDoGetFooters() || req.isDoGetFooters();
    FileMetadataExprType type = req.isSetType() ? req.getType() : FileMetadataExprType.ORC_SARG;
    ByteBuffer[] metadatas = needMetadata ? new ByteBuffer[fileIds.size()] : null;
    ByteBuffer[] ppdResults = new ByteBuffer[fileIds.size()];
    boolean[] eliminated = new boolean[fileIds.size()];
    getMS().getFileMetadataByExpr(fileIds, type, req.getExpr(), metadatas, ppdResults, eliminated);
    for (int i = 0; i < fileIds.size(); ++i) {
      if (!eliminated[i] && ppdResults[i] == null) {
        continue; // No metadata => no ppd.
      }
      MetadataPpdResult mpr = new MetadataPpdResult();
      ByteBuffer ppdResult = eliminated[i] ? null : handleReadOnlyBufferForThrift(ppdResults[i]);
      mpr.setIncludeBitset(ppdResult);
      if (needMetadata) {
        ByteBuffer metadata = eliminated[i] ? null : handleReadOnlyBufferForThrift(metadatas[i]);
        mpr.setMetadata(metadata);
      }
      result.putToMetadata(fileIds.get(i), mpr);
    }
    if (!result.isSetMetadata()) {
      result.setMetadata(Collections.emptyMap()); // Set the required field.
    }
    return result;
  }

  @Override
  public GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req) throws TException {
    GetFileMetadataResult result = new GetFileMetadataResult();
    RawStore ms = getMS();
    if (!ms.isFileMetadataSupported()) {
      result.setIsSupported(false);
      result.setMetadata(Collections.emptyMap()); // Set the required field.
      return result;
    }
    result.setIsSupported(true);
    List<Long> fileIds = req.getFileIds();
    ByteBuffer[] metadatas = ms.getFileMetadata(fileIds);
    assert metadatas.length == fileIds.size();
    for (int i = 0; i < metadatas.length; ++i) {
      ByteBuffer bb = metadatas[i];
      if (bb == null) {
        continue;
      }
      bb = handleReadOnlyBufferForThrift(bb);
      result.putToMetadata(fileIds.get(i), bb);
    }
    if (!result.isSetMetadata()) {
      result.setMetadata(Collections.emptyMap()); // Set the required field.
    }
    return result;
  }

  private ByteBuffer handleReadOnlyBufferForThrift(ByteBuffer bb) {
    if (!bb.isReadOnly()) {
      return bb;
    }
    // Thrift cannot write read-only buffers... oh well.
    // TODO: actually thrift never writes to the buffer, so we could use reflection to
    //       unset the unnecessary read-only flag if allocation/copy perf becomes a problem.
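    // Defensive copy: allocate a writable buffer of equal capacity, bulk-put the read-only
    // source, then flip() so position/limit frame the copied bytes for Thrift serialization.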
ByteBuffer copy = ByteBuffer.allocate(bb.capacity()); copy.put(bb); copy.flip(); return copy; } @Override public PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req) throws TException { RawStore ms = getMS(); if (ms.isFileMetadataSupported()) { ms.putFileMetadata(req.getFileIds(), req.getMetadata(), req.getType()); } return new PutFileMetadataResult(); } @Override public ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req) throws TException { getMS().putFileMetadata(req.getFileIds(), null, null); return new ClearFileMetadataResult(); } @Override public CacheFileMetadataResult cache_file_metadata( CacheFileMetadataRequest req) throws TException { RawStore ms = getMS(); if (!ms.isFileMetadataSupported()) { return new CacheFileMetadataResult(false); } String dbName = req.getDbName(), tblName = req.getTblName(), partName = req.isSetPartName() ? req.getPartName() : null; boolean isAllPart = req.isSetIsAllParts() && req.isIsAllParts(); ms.openTransaction(); boolean success = false; try { Table tbl = ms.getTable(DEFAULT_CATALOG_NAME, dbName, tblName); if (tbl == null) { throw new NoSuchObjectException(dbName + "." + tblName + " not found"); } boolean isPartitioned = tbl.isSetPartitionKeys() && tbl.getPartitionKeysSize() > 0; String tableInputFormat = tbl.isSetSd() ? tbl.getSd().getInputFormat() : null; if (!isPartitioned) { if (partName != null || isAllPart) { throw new MetaException("Table is not partitioned"); } if (!tbl.isSetSd() || !tbl.getSd().isSetLocation()) { throw new MetaException( "Table does not have storage location; this operation is not supported on views"); } FileMetadataExprType type = expressionProxy.getMetadataType(tableInputFormat); if (type == null) { throw new MetaException("The operation is not supported for " + tableInputFormat); } fileMetadataManager.queueCacheMetadata(tbl.getSd().getLocation(), type); success = true; } else { List<String> partNames; if (partName != null) { partNames = Lists.newArrayList(partName); } else if (isAllPart) { partNames = ms.listPartitionNames(DEFAULT_CATALOG_NAME, dbName, tblName, (short)-1); } else { throw new MetaException("Table is partitioned"); } int batchSize = MetastoreConf.getIntVar( conf, ConfVars.BATCH_RETRIEVE_OBJECTS_MAX); int index = 0; int successCount = 0, failCount = 0; HashSet<String> failFormats = null; while (index < partNames.size()) { int currentBatchSize = Math.min(batchSize, partNames.size() - index); List<String> nameBatch = partNames.subList(index, index + currentBatchSize); index += currentBatchSize; List<Partition> parts = ms.getPartitionsByNames(DEFAULT_CATALOG_NAME, dbName, tblName, nameBatch); for (Partition part : parts) { if (!part.isSetSd() || !part.getSd().isSetLocation()) { throw new MetaException("Partition does not have storage location;" + " this operation is not supported on views"); } String inputFormat = part.getSd().isSetInputFormat() ? 
part.getSd().getInputFormat() : tableInputFormat; FileMetadataExprType type = expressionProxy.getMetadataType(inputFormat); if (type == null) { ++failCount; if (failFormats == null) { failFormats = new HashSet<>(); } failFormats.add(inputFormat); } else { ++successCount; fileMetadataManager.queueCacheMetadata(part.getSd().getLocation(), type); } } } success = true; // Regardless of the following exception if (failCount > 0) { String errorMsg = "The operation failed for " + failCount + " partitions and " + "succeeded for " + successCount + " partitions; unsupported formats: "; boolean isFirst = true; for (String s : failFormats) { if (!isFirst) { errorMsg += ", "; } isFirst = false; errorMsg += s; } throw new MetaException(errorMsg); } } } finally { if (success) { if (!ms.commitTransaction()) { throw new MetaException("Failed to commit"); } } else { ms.rollbackTransaction(); } } return new CacheFileMetadataResult(true); } @VisibleForTesting void updateMetrics() throws MetaException { if (databaseCount != null) { tableCount.set(getMS().getTableCount()); partCount.set(getMS().getPartitionCount()); databaseCount.set(getMS().getDatabaseCount()); } } @Override public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_primary_keys", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLPrimaryKey> ret = null; Exception ex = null; try { ret = getMS().getPrimaryKeys(request); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_primary_keys", ret != null, ex, request.getTbl_name()); } return new PrimaryKeysResponse(ret); } @Override public ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startFunction("get_foreign_keys", " : parentdb=" + request.getParent_db_name() + " parenttbl=" + request.getParent_tbl_name() + " foreigndb=" + request.getForeign_db_name() + " foreigntbl=" + request.getForeign_tbl_name()); List<SQLForeignKey> ret = null; Exception ex = null; try { ret = getMS().getForeignKeys(request); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_foreign_keys", ret != null, ex, request.getForeign_tbl_name()); } return new ForeignKeysResponse(ret); } @Override public UniqueConstraintsResponse get_unique_constraints(UniqueConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_unique_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLUniqueConstraint> ret = null; Exception ex = null; try { ret = getMS().getUniqueConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_unique_constraints", ret != null, ex, request.getTbl_name()); } return new UniqueConstraintsResponse(ret); } @Override public NotNullConstraintsResponse get_not_null_constraints(NotNullConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? 
request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_not_null_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLNotNullConstraint> ret = null; Exception ex = null; try { ret = getMS().getNotNullConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_not_null_constraints", ret != null, ex, request.getTbl_name()); } return new NotNullConstraintsResponse(ret); } @Override public DefaultConstraintsResponse get_default_constraints(DefaultConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_default_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLDefaultConstraint> ret = null; Exception ex = null; try { ret = getMS().getDefaultConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_default_constraints", ret != null, ex, request.getTbl_name()); } return new DefaultConstraintsResponse(ret); } @Override public CheckConstraintsResponse get_check_constraints(CheckConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_check_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLCheckConstraint> ret = null; Exception ex = null; try { ret = getMS().getCheckConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_check_constraints", ret != null, ex, request.getTbl_name()); } return new CheckConstraintsResponse(ret); } /** * Api to fetch all table constraints at once. * @param request it consist of catalog name, database name and table name to identify the table in metastore * @return all constraints attached to given table * @throws TException */ @Override public AllTableConstraintsResponse get_all_table_constraints(AllTableConstraintsRequest request) throws TException, MetaException, NoSuchObjectException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_all_table_constraints", request.getCatName(), request.getDbName(), request.getTblName()); SQLAllTableConstraints ret = null; Exception ex = null; try { ret = getMS().getAllTableConstraints(request); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_all_table_constraints", ret != null, ex, request.getTblName()); } return new AllTableConstraintsResponse(ret); } @Override public String get_metastore_db_uuid() throws TException { try { return getMS().getMetastoreDbUuid(); } catch (MetaException e) { LOG.error("Exception thrown while querying metastore db uuid", e); throw e; } } @Override public WMCreateResourcePlanResponse create_resource_plan(WMCreateResourcePlanRequest request) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { int defaultPoolSize = MetastoreConf.getIntVar( conf, MetastoreConf.ConfVars.WM_DEFAULT_POOL_SIZE); WMResourcePlan plan = request.getResourcePlan(); if (defaultPoolSize > 0 && plan.isSetQueryParallelism()) { // If the default pool is not disabled, override the size with the specified parallelism. 
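      // Illustration only: a plan created with DDL along the lines of
      //   CREATE RESOURCE PLAN my_plan WITH QUERY_PARALLELISM=8;
      // reaches this point with isSetQueryParallelism() == true, so the default pool is sized
      // to 8 rather than the configured WM_DEFAULT_POOL_SIZE. (The plan name and the literal 8
      // are made-up example values.)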
defaultPoolSize = plan.getQueryParallelism(); } try { getMS().createResourcePlan(plan, request.getCopyFrom(), defaultPoolSize); return new WMCreateResourcePlanResponse(); } catch (MetaException e) { LOG.error("Exception while trying to persist resource plan", e); throw e; } } @Override public WMGetResourcePlanResponse get_resource_plan(WMGetResourcePlanRequest request) throws NoSuchObjectException, MetaException, TException { try { WMFullResourcePlan rp = getMS().getResourcePlan(request.getResourcePlanName(), request.getNs()); WMGetResourcePlanResponse resp = new WMGetResourcePlanResponse(); resp.setResourcePlan(rp); return resp; } catch (MetaException e) { LOG.error("Exception while trying to retrieve resource plan", e); throw e; } } @Override public WMGetAllResourcePlanResponse get_all_resource_plans(WMGetAllResourcePlanRequest request) throws MetaException, TException { try { WMGetAllResourcePlanResponse resp = new WMGetAllResourcePlanResponse(); resp.setResourcePlans(getMS().getAllResourcePlans(request.getNs())); return resp; } catch (MetaException e) { LOG.error("Exception while trying to retrieve resource plans", e); throw e; } } @Override public WMAlterResourcePlanResponse alter_resource_plan(WMAlterResourcePlanRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { if (((request.isIsEnableAndActivate() ? 1 : 0) + (request.isIsReplace() ? 1 : 0) + (request.isIsForceDeactivate() ? 1 : 0)) > 1) { throw new MetaException("Invalid request; multiple flags are set"); } WMAlterResourcePlanResponse response = new WMAlterResourcePlanResponse(); // This method will only return full resource plan when activating one, // to give the caller the result atomically with the activation. WMFullResourcePlan fullPlanAfterAlter = getMS().alterResourcePlan( request.getResourcePlanName(), request.getNs(), request.getResourcePlan(), request.isIsEnableAndActivate(), request.isIsForceDeactivate(), request.isIsReplace()); if (fullPlanAfterAlter != null) { response.setFullResourcePlan(fullPlanAfterAlter); } return response; } catch (MetaException e) { LOG.error("Exception while trying to alter resource plan", e); throw e; } } @Override public WMGetActiveResourcePlanResponse get_active_resource_plan( WMGetActiveResourcePlanRequest request) throws MetaException, TException { try { WMGetActiveResourcePlanResponse response = new WMGetActiveResourcePlanResponse(); response.setResourcePlan(getMS().getActiveResourcePlan(request.getNs())); return response; } catch (MetaException e) { LOG.error("Exception while trying to get active resource plan", e); throw e; } } @Override public WMValidateResourcePlanResponse validate_resource_plan(WMValidateResourcePlanRequest request) throws NoSuchObjectException, MetaException, TException { try { return getMS().validateResourcePlan(request.getResourcePlanName(), request.getNs()); } catch (MetaException e) { LOG.error("Exception while trying to validate resource plan", e); throw e; } } @Override public WMDropResourcePlanResponse drop_resource_plan(WMDropResourcePlanRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropResourcePlan(request.getResourcePlanName(), request.getNs()); return new WMDropResourcePlanResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop resource plan", e); throw e; } } @Override public WMCreateTriggerResponse create_wm_trigger(WMCreateTriggerRequest request) throws AlreadyExistsException, InvalidObjectException, 
MetaException, TException { try { getMS().createWMTrigger(request.getTrigger()); return new WMCreateTriggerResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create trigger", e); throw e; } } @Override public WMAlterTriggerResponse alter_wm_trigger(WMAlterTriggerRequest request) throws NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().alterWMTrigger(request.getTrigger()); return new WMAlterTriggerResponse(); } catch (MetaException e) { LOG.error("Exception while trying to alter trigger", e); throw e; } } @Override public WMDropTriggerResponse drop_wm_trigger(WMDropTriggerRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropWMTrigger(request.getResourcePlanName(), request.getTriggerName(), request.getNs()); return new WMDropTriggerResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop trigger.", e); throw e; } } @Override public WMGetTriggersForResourePlanResponse get_triggers_for_resourceplan( WMGetTriggersForResourePlanRequest request) throws NoSuchObjectException, MetaException, TException { try { List<WMTrigger> triggers = getMS().getTriggersForResourcePlan(request.getResourcePlanName(), request.getNs()); WMGetTriggersForResourePlanResponse response = new WMGetTriggersForResourePlanResponse(); response.setTriggers(triggers); return response; } catch (MetaException e) { LOG.error("Exception while trying to retrieve triggers plans", e); throw e; } } @Override public WMAlterPoolResponse alter_wm_pool(WMAlterPoolRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().alterPool(request.getPool(), request.getPoolPath()); return new WMAlterPoolResponse(); } catch (MetaException e) { LOG.error("Exception while trying to alter WMPool", e); throw e; } } @Override public WMCreatePoolResponse create_wm_pool(WMCreatePoolRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().createPool(request.getPool()); return new WMCreatePoolResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create WMPool", e); throw e; } } @Override public WMDropPoolResponse drop_wm_pool(WMDropPoolRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropWMPool(request.getResourcePlanName(), request.getPoolPath(), request.getNs()); return new WMDropPoolResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop WMPool", e); throw e; } } @Override public WMCreateOrUpdateMappingResponse create_or_update_wm_mapping( WMCreateOrUpdateMappingRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().createOrUpdateWMMapping(request.getMapping(), request.isUpdate()); return new WMCreateOrUpdateMappingResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create or update WMMapping", e); throw e; } } @Override public WMDropMappingResponse drop_wm_mapping(WMDropMappingRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropWMMapping(request.getMapping()); return new WMDropMappingResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop WMMapping", e); throw e; } } @Override public WMCreateOrDropTriggerToPoolMappingResponse 
create_or_drop_wm_trigger_to_pool_mapping( WMCreateOrDropTriggerToPoolMappingRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { if (request.isDrop()) { getMS().dropWMTriggerToPoolMapping(request.getResourcePlanName(), request.getTriggerName(), request.getPoolPath(), request.getNs()); } else { getMS().createWMTriggerToPoolMapping(request.getResourcePlanName(), request.getTriggerName(), request.getPoolPath(), request.getNs()); } return new WMCreateOrDropTriggerToPoolMappingResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create or drop pool mappings", e); throw e; } } @Override public void create_ischema(ISchema schema) throws TException { startFunction("create_ischema", ": " + schema.getName()); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { firePreEvent(new PreCreateISchemaEvent(this, schema)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.createISchema(schema); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_ISCHEMA, new CreateISchemaEvent(true, this, schema)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_ISCHEMA, new CreateISchemaEvent(success, this, schema), null, transactionalListenersResponses, ms); } } } catch (MetaException|AlreadyExistsException e) { LOG.error("Caught exception creating schema", e); ex = e; throw e; } finally { endFunction("create_ischema", success, ex); } } @Override public void alter_ischema(AlterISchemaRequest rqst) throws TException { startFunction("alter_ischema", ": " + rqst); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { ISchema oldSchema = ms.getISchema(rqst.getName()); if (oldSchema == null) { throw new NoSuchObjectException("Could not find schema " + rqst.getName()); } firePreEvent(new PreAlterISchemaEvent(this, oldSchema, rqst.getNewSchema())); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.alterISchema(rqst.getName(), rqst.getNewSchema()); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_ISCHEMA, new AlterISchemaEvent(true, this, oldSchema, rqst.getNewSchema())); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_ISCHEMA, new AlterISchemaEvent(success, this, oldSchema, rqst.getNewSchema()), null, transactionalListenersResponses, ms); } } } catch (MetaException|NoSuchObjectException e) { LOG.error("Caught exception altering schema", e); ex = e; throw e; } finally { endFunction("alter_ischema", success, ex); } } @Override public ISchema get_ischema(ISchemaName schemaName) throws TException { startFunction("get_ischema", ": " + schemaName); Exception ex = null; ISchema schema = null; try { schema = getMS().getISchema(schemaName); if (schema == null) { throw new NoSuchObjectException("No schema named " + schemaName + " exists"); } firePreEvent(new PreReadISchemaEvent(this, schema)); return schema; } catch (MetaException e) { LOG.error("Caught exception getting schema", e); ex = e; throw e; } finally 
    {
      endFunction("get_ischema", schema != null, ex);
    }
  }

  @Override
  public void drop_ischema(ISchemaName schemaName) throws TException {
    startFunction("drop_ischema", ": " + schemaName);
    Exception ex = null;
    boolean success = false;
    RawStore ms = getMS();
    try {
      // look for any valid versions. This will also throw NoSuchObjectException if the schema
      // itself doesn't exist, which is what we want.
      SchemaVersion latest = ms.getLatestSchemaVersion(schemaName);
      if (latest != null) {
        ex = new InvalidOperationException("Schema " + schemaName + " cannot be dropped, it has"
            + " at least one valid version");
        throw (InvalidOperationException)ex;
      }
      ISchema schema = ms.getISchema(schemaName);
      firePreEvent(new PreDropISchemaEvent(this, schema));
      Map<String, String> transactionalListenersResponses = Collections.emptyMap();
      ms.openTransaction();
      try {
        ms.dropISchema(schemaName);
        if (!transactionalListeners.isEmpty()) {
          transactionalListenersResponses =
              MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_ISCHEMA,
                  new DropISchemaEvent(true, this, schema));
        }
        success = ms.commitTransaction();
      } finally {
        if (!success) {
          ms.rollbackTransaction();
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_ISCHEMA,
              new DropISchemaEvent(success, this, schema), null, transactionalListenersResponses, ms);
        }
      }
    } catch (MetaException|NoSuchObjectException e) {
      LOG.error("Caught exception dropping schema", e);
      ex = e;
      throw e;
    } finally {
      endFunction("drop_ischema", success, ex);
    }
  }

  @Override
  public void add_schema_version(SchemaVersion schemaVersion) throws TException {
    startFunction("add_schema_version", ": " + schemaVersion);
    boolean success = false;
    Exception ex = null;
    RawStore ms = getMS();
    try {
      // Make sure the referenced schema exists
      if (ms.getISchema(schemaVersion.getSchema()) == null) {
        throw new NoSuchObjectException("No schema named " + schemaVersion.getSchema());
      }
      firePreEvent(new PreAddSchemaVersionEvent(this, schemaVersion));
      Map<String, String> transactionalListenersResponses = Collections.emptyMap();
      ms.openTransaction();
      try {
        ms.addSchemaVersion(schemaVersion);
        if (!transactionalListeners.isEmpty()) {
          transactionalListenersResponses =
              MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_SCHEMA_VERSION,
                  new AddSchemaVersionEvent(true, this, schemaVersion));
        }
        success = ms.commitTransaction();
      } finally {
        if (!success) {
          ms.rollbackTransaction();
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_SCHEMA_VERSION,
              new AddSchemaVersionEvent(success, this, schemaVersion), null, transactionalListenersResponses, ms);
        }
      }
    } catch (MetaException|AlreadyExistsException e) {
      LOG.error("Caught exception adding schema version", e);
      ex = e;
      throw e;
    } finally {
      endFunction("add_schema_version", success, ex);
    }
  }

  @Override
  public SchemaVersion get_schema_version(SchemaVersionDescriptor version) throws TException {
    startFunction("get_schema_version", ": " + version);
    Exception ex = null;
    SchemaVersion schemaVersion = null;
    try {
      schemaVersion = getMS().getSchemaVersion(version);
      if (schemaVersion == null) {
        throw new NoSuchObjectException("No schema version " + version + " exists");
      }
      firePreEvent(new PreReadhSchemaVersionEvent(this, Collections.singletonList(schemaVersion)));
      return schemaVersion;
    } catch (MetaException e) {
      LOG.error("Caught exception getting schema version", e);
      ex = e;
      throw e;
    } finally {
      endFunction("get_schema_version", schemaVersion != null, ex);
    }
  }

  @Override
  public
  SchemaVersion get_schema_latest_version(ISchemaName schemaName) throws TException {
    startFunction("get_latest_schema_version", ": " + schemaName);
    Exception ex = null;
    SchemaVersion schemaVersion = null;
    try {
      schemaVersion = getMS().getLatestSchemaVersion(schemaName);
      if (schemaVersion == null) {
        throw new NoSuchObjectException("No versions of schema " + schemaName + " exist");
      }
      firePreEvent(new PreReadhSchemaVersionEvent(this, Collections.singletonList(schemaVersion)));
      return schemaVersion;
    } catch (MetaException e) {
      LOG.error("Caught exception getting latest schema version", e);
      ex = e;
      throw e;
    } finally {
      endFunction("get_latest_schema_version", schemaVersion != null, ex);
    }
  }

  @Override
  public List<SchemaVersion> get_schema_all_versions(ISchemaName schemaName) throws TException {
    startFunction("get_all_schema_versions", ": " + schemaName);
    Exception ex = null;
    List<SchemaVersion> schemaVersions = null;
    try {
      schemaVersions = getMS().getAllSchemaVersion(schemaName);
      if (schemaVersions == null) {
        throw new NoSuchObjectException("No versions of schema " + schemaName + " exist");
      }
      firePreEvent(new PreReadhSchemaVersionEvent(this, schemaVersions));
      return schemaVersions;
    } catch (MetaException e) {
      LOG.error("Caught exception getting all schema versions", e);
      ex = e;
      throw e;
    } finally {
      endFunction("get_all_schema_versions", schemaVersions != null, ex);
    }
  }

  @Override
  public void drop_schema_version(SchemaVersionDescriptor version) throws TException {
    startFunction("drop_schema_version", ": " + version);
    Exception ex = null;
    boolean success = false;
    RawStore ms = getMS();
    try {
      SchemaVersion schemaVersion = ms.getSchemaVersion(version);
      if (schemaVersion == null) {
        throw new NoSuchObjectException("No schema version " + version);
      }
      firePreEvent(new PreDropSchemaVersionEvent(this, schemaVersion));
      Map<String, String> transactionalListenersResponses = Collections.emptyMap();
      ms.openTransaction();
      try {
        ms.dropSchemaVersion(version);
        if (!transactionalListeners.isEmpty()) {
          transactionalListenersResponses =
              MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_SCHEMA_VERSION,
                  new DropSchemaVersionEvent(true, this, schemaVersion));
        }
        success = ms.commitTransaction();
      } finally {
        if (!success) {
          ms.rollbackTransaction();
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_SCHEMA_VERSION,
              new DropSchemaVersionEvent(success, this, schemaVersion), null, transactionalListenersResponses, ms);
        }
      }
    } catch (MetaException|NoSuchObjectException e) {
      LOG.error("Caught exception dropping schema version", e);
      ex = e;
      throw e;
    } finally {
      endFunction("drop_schema_version", success, ex);
    }
  }

  @Override
  public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst rqst) throws TException {
    startFunction("get_schemas_by_cols");
    Exception ex = null;
    List<SchemaVersion> schemaVersions = Collections.emptyList();
    try {
      schemaVersions = getMS().getSchemaVersionsByColumns(rqst.getColName(), rqst.getColNamespace(), rqst.getType());
      firePreEvent(new PreReadhSchemaVersionEvent(this, schemaVersions));
      final List<SchemaVersionDescriptor> entries = new ArrayList<>(schemaVersions.size());
      schemaVersions.forEach(schemaVersion -> entries.add(
          new SchemaVersionDescriptor(schemaVersion.getSchema(), schemaVersion.getVersion())));
      return new FindSchemasByColsResp(entries);
    } catch (MetaException e) {
      LOG.error("Caught exception doing schema version query", e);
      ex = e;
      throw e;
    } finally {
      endFunction("get_schemas_by_cols", !schemaVersions.isEmpty(), ex);
    }
  }

  @Override
  public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest rqst) throws TException {
    startFunction("map_schema_version_to_serde", ": " + rqst);
    boolean success = false;
    Exception ex = null;
    RawStore ms = getMS();
    try {
      SchemaVersion oldSchemaVersion = ms.getSchemaVersion(rqst.getSchemaVersion());
      if (oldSchemaVersion == null) {
        throw new NoSuchObjectException("No schema version " + rqst.getSchemaVersion());
      }
      SerDeInfo serde = ms.getSerDeInfo(rqst.getSerdeName());
      if (serde == null) {
        throw new NoSuchObjectException("No SerDe named " + rqst.getSerdeName());
      }
      SchemaVersion newSchemaVersion = new SchemaVersion(oldSchemaVersion);
      newSchemaVersion.setSerDe(serde);
      firePreEvent(new PreAlterSchemaVersionEvent(this, oldSchemaVersion, newSchemaVersion));
      Map<String, String> transactionalListenersResponses = Collections.emptyMap();
      ms.openTransaction();
      try {
        ms.alterSchemaVersion(rqst.getSchemaVersion(), newSchemaVersion);
        if (!transactionalListeners.isEmpty()) {
          transactionalListenersResponses =
              MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_SCHEMA_VERSION,
                  new AlterSchemaVersionEvent(true, this, oldSchemaVersion, newSchemaVersion));
        }
        success = ms.commitTransaction();
      } finally {
        if (!success) {
          ms.rollbackTransaction();
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_SCHEMA_VERSION,
              new AlterSchemaVersionEvent(success, this, oldSchemaVersion, newSchemaVersion),
              null, transactionalListenersResponses, ms);
        }
      }
    } catch (MetaException|NoSuchObjectException e) {
      LOG.error("Caught exception mapping schema version to serde", e);
      ex = e;
      throw e;
    } finally {
      endFunction("map_schema_version_to_serde", success, ex);
    }
  }

  @Override
  public void set_schema_version_state(SetSchemaVersionStateRequest rqst) throws TException {
    startFunction("set_schema_version_state", ": " + rqst);
    boolean success = false;
    Exception ex = null;
    RawStore ms = getMS();
    try {
      SchemaVersion oldSchemaVersion = ms.getSchemaVersion(rqst.getSchemaVersion());
      if (oldSchemaVersion == null) {
        throw new NoSuchObjectException("No schema version " + rqst.getSchemaVersion());
      }
      SchemaVersion newSchemaVersion = new SchemaVersion(oldSchemaVersion);
      newSchemaVersion.setState(rqst.getState());
      firePreEvent(new PreAlterSchemaVersionEvent(this, oldSchemaVersion, newSchemaVersion));
      Map<String, String> transactionalListenersResponses = Collections.emptyMap();
      ms.openTransaction();
      try {
        ms.alterSchemaVersion(rqst.getSchemaVersion(), newSchemaVersion);
        if (!transactionalListeners.isEmpty()) {
          transactionalListenersResponses =
              MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_SCHEMA_VERSION,
                  new AlterSchemaVersionEvent(true, this, oldSchemaVersion, newSchemaVersion));
        }
        success = ms.commitTransaction();
      } finally {
        if (!success) {
          ms.rollbackTransaction();
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_SCHEMA_VERSION,
              new AlterSchemaVersionEvent(success, this, oldSchemaVersion, newSchemaVersion),
              null, transactionalListenersResponses, ms);
        }
      }
    } catch (MetaException|NoSuchObjectException e) {
      LOG.error("Caught exception changing schema version state", e);
      ex = e;
      throw e;
    } finally {
      endFunction("set_schema_version_state", success, ex);
    }
  }

  @Override
  public void add_serde(SerDeInfo serde) throws TException {
    startFunction("create_serde", ": " + serde.getName());
    Exception ex = null;
    boolean success = false;
    RawStore ms = getMS();
    try {
      ms.openTransaction();
      ms.addSerde(serde);
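      // Standard RawStore transaction pattern used throughout this handler: commitTransaction()
      // reports success, and the finally block below rolls back whenever success stayed false.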
      success = ms.commitTransaction();
    } catch (MetaException|AlreadyExistsException e) {
      LOG.error("Caught exception creating serde", e);
      ex = e;
      throw e;
    } finally {
      if (!success) {
        ms.rollbackTransaction();
      }
      endFunction("create_serde", success, ex);
    }
  }

  @Override
  public SerDeInfo get_serde(GetSerdeRequest rqst) throws TException {
    startFunction("get_serde", ": " + rqst);
    Exception ex = null;
    SerDeInfo serde = null;
    try {
      serde = getMS().getSerDeInfo(rqst.getSerdeName());
      if (serde == null) {
        throw new NoSuchObjectException("No serde named " + rqst.getSerdeName() + " exists");
      }
      return serde;
    } catch (MetaException e) {
      LOG.error("Caught exception getting serde", e);
      ex = e;
      throw e;
    } finally {
      endFunction("get_serde", serde != null, ex);
    }
  }

  @Override
  public LockResponse get_lock_materialization_rebuild(String dbName, String tableName, long txnId)
      throws TException {
    return getTxnHandler().lockMaterializationRebuild(dbName, tableName, txnId);
  }

  @Override
  public boolean heartbeat_lock_materialization_rebuild(String dbName, String tableName, long txnId)
      throws TException {
    return getTxnHandler().heartbeatLockMaterializationRebuild(dbName, tableName, txnId);
  }

  @Override
  public void add_runtime_stats(RuntimeStat stat) throws TException {
    startFunction("store_runtime_stats");
    Exception ex = null;
    boolean success = false;
    RawStore ms = getMS();
    try {
      ms.openTransaction();
      ms.addRuntimeStat(stat);
      success = ms.commitTransaction();
    } catch (Exception e) {
      LOG.error("Caught exception", e);
      ex = e;
      throw e;
    } finally {
      if (!success) {
        ms.rollbackTransaction();
      }
      endFunction("store_runtime_stats", success, ex);
    }
  }

  @Override
  public List<RuntimeStat> get_runtime_stats(GetRuntimeStatsRequest rqst) throws TException {
    startFunction("get_runtime_stats");
    Exception ex = null;
    try {
      List<RuntimeStat> res = getMS().getRuntimeStats(rqst.getMaxWeight(), rqst.getMaxCreateTime());
      return res;
    } catch (MetaException e) {
      LOG.error("Caught exception", e);
      ex = e;
      throw e;
    } finally {
      endFunction("get_runtime_stats", ex == null, ex);
    }
  }

  @Override
  public ScheduledQueryPollResponse scheduled_query_poll(ScheduledQueryPollRequest request)
      throws MetaException, TException {
    startFunction("scheduled_query_poll");
    Exception ex = null;
    try {
      RawStore ms = getMS();
      return ms.scheduledQueryPoll(request);
    } catch (Exception e) {
      LOG.error("Caught exception", e);
      ex = e;
      throw e;
    } finally {
      endFunction("scheduled_query_poll", ex == null, ex);
    }
  }

  @Override
  public void scheduled_query_maintenance(ScheduledQueryMaintenanceRequest request)
      throws MetaException, TException {
    startFunction("scheduled_query_maintenance");
    Exception ex = null;
    try {
      RawStore ms = getMS();
      ms.scheduledQueryMaintenance(request);
    } catch (Exception e) {
      LOG.error("Caught exception", e);
      ex = e;
      throw e;
    } finally {
      endFunction("scheduled_query_maintenance", ex == null, ex);
    }
  }

  @Override
  public void scheduled_query_progress(ScheduledQueryProgressInfo info) throws MetaException, TException {
    startFunction("scheduled_query_progress");
    Exception ex = null;
    try {
      RawStore ms = getMS();
      ms.scheduledQueryProgress(info);
    } catch (Exception e) {
      LOG.error("Caught exception", e);
      ex = e;
      throw e;
    } finally {
      endFunction("scheduled_query_progress", ex == null, ex);
    }
  }

  @Override
  public ScheduledQuery get_scheduled_query(ScheduledQueryKey scheduleKey) throws TException {
    startFunction("get_scheduled_query");
    Exception ex = null;
    try {
      return getMS().getScheduledQuery(scheduleKey);
    } catch (Exception e) {
      LOG.error("Caught exception", e);
      ex = e;
      throw e;
    } finally {
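      // endFunction pairs with the startFunction call above; success is recorded as
      // "no exception captured", keeping the per-API metrics consistent.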
endFunction("get_scheduled_query", ex == null, ex); } } @Override public void add_replication_metrics(ReplicationMetricList replicationMetricList) throws MetaException{ startFunction("add_replication_metrics"); Exception ex = null; try { getMS().addReplicationMetrics(replicationMetricList); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("add_replication_metrics", ex == null, ex); } } @Override public ReplicationMetricList get_replication_metrics(GetReplicationMetricsRequest getReplicationMetricsRequest) throws MetaException{ startFunction("get_replication_metrics"); Exception ex = null; try { return getMS().getReplicationMetrics(getReplicationMetricsRequest); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_replication_metrics", ex == null, ex); } } @Override public void create_stored_procedure(StoredProcedure proc) throws NoSuchObjectException, MetaException { startFunction("create_stored_procedure"); Exception ex = null; throwUnsupportedExceptionIfRemoteDB(proc.getDbName(), "create_stored_procedure"); try { getMS().createOrUpdateStoredProcedure(proc); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("create_stored_procedure", ex == null, ex); } } public StoredProcedure get_stored_procedure(StoredProcedureRequest request) throws MetaException, NoSuchObjectException { startFunction("get_stored_procedure"); Exception ex = null; try { StoredProcedure proc = getMS().getStoredProcedure(request.getCatName(), request.getDbName(), request.getProcName()); if (proc == null) { throw new NoSuchObjectException( "HPL/SQL StoredProcedure " + request.getDbName() + "." + request.getProcName() + " does not exist"); } return proc; } catch (Exception e) { if (!(e instanceof NoSuchObjectException)) { LOG.error("Caught exception", e); } ex = e; throw e; } finally { endFunction("get_stored_procedure", ex == null, ex); } } @Override public void drop_stored_procedure(StoredProcedureRequest request) throws MetaException { startFunction("drop_stored_procedure"); Exception ex = null; try { getMS().dropStoredProcedure(request.getCatName(), request.getDbName(), request.getProcName()); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("drop_stored_procedure", ex == null, ex); } } @Override public List<String> get_all_stored_procedures(ListStoredProcedureRequest request) throws MetaException { startFunction("get_all_stored_procedures"); Exception ex = null; try { return getMS().getAllStoredProcedures(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_all_stored_procedures", ex == null, ex); } } public Package find_package(GetPackageRequest request) throws MetaException, NoSuchObjectException { startFunction("find_package"); Exception ex = null; try { Package pkg = getMS().findPackage(request); if (pkg == null) { throw new NoSuchObjectException( "HPL/SQL package " + request.getDbName() + "." 
+ request.getPackageName() + " does not exist"); } return pkg; } catch (Exception e) { if (!(e instanceof NoSuchObjectException)) { LOG.error("Caught exception", e); } ex = e; throw e; } finally { endFunction("find_package", ex == null, ex); } } public void add_package(AddPackageRequest request) throws MetaException, NoSuchObjectException { startFunction("add_package"); Exception ex = null; try { getMS().addPackage(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("add_package", ex == null, ex); } } public List<String> get_all_packages(ListPackageRequest request) throws MetaException { startFunction("get_all_packages"); Exception ex = null; try { return getMS().listPackages(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_all_packages", ex == null, ex); } } public void drop_package(DropPackageRequest request) throws MetaException { startFunction("drop_package"); Exception ex = null; try { getMS().dropPackage(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("drop_package", ex == null, ex); } } @Override public List<WriteEventInfo> get_all_write_event_info(GetAllWriteEventInfoRequest request) throws MetaException { startFunction("get_all_write_event_info"); Exception ex = null; try { List<WriteEventInfo> writeEventInfoList = getMS().getAllWriteEventInfo(request.getTxnId(), request.getDbName(), request.getTableName()); return writeEventInfoList == null ? Collections.emptyList() : writeEventInfoList; } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_all_write_event_info", ex == null, ex); } } }
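// Illustrative sketch (not part of the original file): every Thrift-facing method above
// follows the same instrumentation pattern -- startFunction() opens a timer and an
// active-call counter, mutating work runs inside an explicit RawStore transaction, and
// endFunction() in a finally block records success and notifies the configured
// MetaStoreEndFunctionListeners. "example_write_call" and "ExampleRequest" below are
// made-up names; a hypothetical new handler method would look like:
//
// @Override
// public void example_write_call(ExampleRequest rqst) throws TException {
//   startFunction("example_write_call");
//   Exception ex = null;
//   boolean success = false;
//   RawStore ms = getMS();
//   try {
//     ms.openTransaction();             // group the RawStore writes atomically
//     // ... RawStore mutations go here ...
//     success = ms.commitTransaction(); // true only if the commit succeeded
//   } catch (Exception e) {
//     LOG.error("Caught exception", e);
//     ex = e;
//     throw e;
//   } finally {
//     if (!success) {
//       ms.rollbackTransaction();       // undo partial writes on any failure
//     }
//     endFunction("example_write_call", success, ex); // stop timer, notify listeners
//   }
// }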
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore; import com.codahale.metrics.Counter; import com.facebook.fb303.FacebookBase; import com.facebook.fb303.fb_status; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.Lists; import com.google.common.util.concurrent.Striped; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.commons.collections.CollectionUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.AcidConstants; import org.apache.hadoop.hive.common.AcidMetaDataFile; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidReaderWriteIdList; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.common.repl.ReplConst; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.api.Package; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; import org.apache.hadoop.hive.metastore.dataconnector.DataConnectorProviderFactory; import org.apache.hadoop.hive.metastore.events.*; import org.apache.hadoop.hive.metastore.messaging.EventMessage; import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType; import org.apache.hadoop.hive.metastore.metrics.Metrics; import org.apache.hadoop.hive.metastore.metrics.MetricsConstants; import org.apache.hadoop.hive.metastore.metrics.PerfLogger; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.txn.CompactionInfo; import org.apache.hadoop.hive.metastore.txn.TxnStore; import org.apache.hadoop.hive.metastore.txn.TxnUtils; import org.apache.hadoop.hive.metastore.utils.FileUtils; import org.apache.hadoop.hive.metastore.utils.FilterUtils; import org.apache.hadoop.hive.metastore.utils.HdfsUtils; import org.apache.hadoop.hive.metastore.utils.JavaUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.metastore.utils.MetastoreVersionInfo; import org.apache.hadoop.hive.metastore.utils.SecurityUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jdo.JDOException; 
import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.nio.ByteBuffer; import java.security.PrivilegedExceptionAction; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.PriorityQueue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.regex.Pattern; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.join; import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_CTAS; import static org.apache.hadoop.hive.metastore.ExceptionHandler.handleException; import static org.apache.hadoop.hive.metastore.ExceptionHandler.newMetaException; import static org.apache.hadoop.hive.metastore.ExceptionHandler.rethrowException; import static org.apache.hadoop.hive.metastore.ExceptionHandler.throwMetaException; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_COMMENT; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; import static org.apache.hadoop.hive.metastore.Warehouse.getCatalogQualifiedTableName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.CAT_NAME; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.DB_NAME; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.parseDbName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.prependCatalogToDbName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.prependNotNullCatToDbName; /** * Default handler for all Hive Metastore methods. Implements methods defined in hive_metastore.thrift. */ public class HMSHandler extends FacebookBase implements IHMSHandler { public static final Logger LOG = LoggerFactory.getLogger(HMSHandler.class); private final Configuration conf; // stores datastore (jpox) properties, // right now they come from jpox.properties // Flag to control that always threads are initialized only once // instead of multiple times private final static AtomicBoolean alwaysThreadsInitialized = new AtomicBoolean(false); private static String currentUrl; private FileMetadataManager fileMetadataManager; private PartitionExpressionProxy expressionProxy; private StorageSchemaReader storageSchemaReader; private IMetaStoreMetadataTransformer transformer; private static DataConnectorProviderFactory dataconnectorFactory = null; // Variables for metrics // Package visible so that HMSMetricsListener can see them. 
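// Note: these gauges are created in init() via Metrics.getOrCreateGauge(...) when
// ConfVars.METRICS_ENABLED is set, and populated by updateMetrics().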
static AtomicInteger databaseCount, tableCount, partCount; public static final String PARTITION_NUMBER_EXCEED_LIMIT_MSG = "Number of partitions scanned (=%d) on table '%s' exceeds limit (=%d). This is controlled on the metastore server by %s."; // Used for testing to simulate method timeout. @VisibleForTesting static boolean testTimeoutEnabled = false; @VisibleForTesting static long testTimeoutValue = -1; public static final String TRUNCATE_SKIP_DATA_DELETION = "truncateSkipDataDeletion"; public static final String ADMIN = "admin"; public static final String PUBLIC = "public"; static final String NO_FILTER_STRING = ""; static final int UNLIMITED_MAX_PARTITIONS = -1; private Warehouse wh; // hdfs warehouse private static Striped<Lock> tablelocks; private static final ThreadLocal<RawStore> threadLocalMS = new ThreadLocal<RawStore>(); private static final ThreadLocal<TxnStore> threadLocalTxn = new ThreadLocal<TxnStore>(); private static final ThreadLocal<Map<String, com.codahale.metrics.Timer.Context>> timerContexts = new ThreadLocal<Map<String, com.codahale.metrics.Timer.Context>>() { @Override protected Map<String, com.codahale.metrics.Timer.Context> initialValue() { return new HashMap<>(); } }; public static RawStore getRawStore() { return threadLocalMS.get(); } static void cleanupRawStore() { try { RawStore rs = getRawStore(); if (rs != null) { logAndAudit("Cleaning up thread local RawStore..."); rs.shutdown(); } } finally { HMSHandler handler = threadLocalHMSHandler.get(); if (handler != null) { handler.notifyMetaListenersOnShutDown(); } threadLocalHMSHandler.remove(); threadLocalConf.remove(); threadLocalModifiedConfig.remove(); removeRawStore(); logAndAudit("Done cleaning up thread local RawStore"); } } static void removeRawStore() { threadLocalMS.remove(); } // Thread local configuration is needed as many threads could make changes // to the conf using the connection hook private static final ThreadLocal<Configuration> threadLocalConf = new ThreadLocal<Configuration>(); /** * Thread local HMSHandler used during shutdown to notify meta listeners */ private static final ThreadLocal<HMSHandler> threadLocalHMSHandler = new ThreadLocal<>(); /** * Thread local Map to keep track of modified meta conf keys */ private static final ThreadLocal<Map<String, String>> threadLocalModifiedConfig = new ThreadLocal<Map<String, String>>() { @Override protected Map<String, String> initialValue() { return new HashMap<>(); } }; private static ExecutorService threadPool; static final Logger auditLog = LoggerFactory.getLogger( HiveMetaStore.class.getName() + ".audit"); private static void logAuditEvent(String cmd) { if (cmd == null) { return; } UserGroupInformation ugi; try { ugi = SecurityUtils.getUGI(); } catch (Exception ex) { throw new RuntimeException(ex); } String address = getIPAddress(); if (address == null) { address = "unknown-ip-addr"; } auditLog.info("ugi={} ip={} cmd={} ", ugi.getUserName(), address, cmd); } public static String getIPAddress() { if (HiveMetaStore.useSasl) { if (HiveMetaStore.saslServer != null && HiveMetaStore.saslServer.getRemoteAddress() != null) { return HiveMetaStore.saslServer.getRemoteAddress().getHostAddress(); } } else { // if kerberos is not enabled return getThreadLocalIpAddress(); } return null; } private static AtomicInteger nextSerialNum = new AtomicInteger(); private static ThreadLocal<Integer> threadLocalId = new ThreadLocal<Integer>() { @Override protected Integer initialValue() { return nextSerialNum.getAndIncrement(); } }; // This will only be set if the 
metastore is being accessed from a metastore Thrift server, // not if it is from the CLI. Also, only if the TTransport being used to connect is an // instance of TSocket. This is also not set when kerberos is used. private static ThreadLocal<String> threadLocalIpAddress = new ThreadLocal<String>(); /** * Internal function to notify listeners for meta config change events */ private void notifyMetaListeners(String key, String oldValue, String newValue) throws MetaException { for (MetaStoreEventListener listener : listeners) { listener.onConfigChange(new ConfigChangeEvent(this, key, oldValue, newValue)); } if (transactionalListeners.size() > 0) { // All the fields of this event are final, so no reason to create a new one for each // listener ConfigChangeEvent cce = new ConfigChangeEvent(this, key, oldValue, newValue); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onConfigChange(cce); } } } /** * Internal function to notify listeners to revert to the old values of keys * that were modified during setMetaConf. This would get called from HiveMetaStore#cleanupRawStore */ private void notifyMetaListenersOnShutDown() { Map<String, String> modifiedConf = threadLocalModifiedConfig.get(); if (modifiedConf == null) { // Nothing got modified return; } try { Configuration conf = threadLocalConf.get(); if (conf == null) { throw new MetaException("Unexpected: modifiedConf is non-null but conf is null"); } // Notify listeners of the changed value for (Map.Entry<String, String> entry : modifiedConf.entrySet()) { String key = entry.getKey(); // curr value becomes old and vice-versa String currVal = entry.getValue(); String oldVal = conf.get(key); if (!Objects.equals(oldVal, currVal)) { notifyMetaListeners(key, oldVal, currVal); } } logAndAudit("Meta listeners shutdown notification completed."); } catch (MetaException e) { LOG.error("Failed to notify meta listeners on shutdown: ", e); } } static void setThreadLocalIpAddress(String ipAddress) { threadLocalIpAddress.set(ipAddress); } // This will return null if the metastore is not being accessed from a metastore Thrift server, // or if the TTransport being used to connect is not an instance of TSocket, or if kerberos // is used static String getThreadLocalIpAddress() { return threadLocalIpAddress.get(); } // Make it possible for tests to check that the right type of PartitionExpressionProxy was // instantiated. @VisibleForTesting PartitionExpressionProxy getExpressionProxy() { return expressionProxy; } /** * Use {@link #getThreadId()} instead.
* @return thread id */ @Deprecated public static Integer get() { return threadLocalId.get(); } @Override public int getThreadId() { return threadLocalId.get(); } public HMSHandler(String name) throws MetaException { this(name, MetastoreConf.newMetastoreConf(), true); } public HMSHandler(String name, Configuration conf) throws MetaException { this(name, conf, true); } public HMSHandler(String name, Configuration conf, boolean init) throws MetaException { super(name); this.conf = conf; isInTest = MetastoreConf.getBoolVar(this.conf, ConfVars.HIVE_IN_TEST); if (threadPool == null) { synchronized (HMSHandler.class) { if (threadPool == null) { int numThreads = MetastoreConf.getIntVar(conf, ConfVars.FS_HANDLER_THREADS_COUNT); threadPool = Executors.newFixedThreadPool(numThreads, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("HMSHandler #%d").build()); int numTableLocks = MetastoreConf.getIntVar(conf, ConfVars.METASTORE_NUM_STRIPED_TABLE_LOCKS); tablelocks = Striped.lock(numTableLocks); } } } if (init) { init(); } } /** * Use {@link #getConf()} instead. * @return Configuration object */ @Deprecated public Configuration getHiveConf() { return conf; } private AlterHandler alterHandler; private List<MetaStorePreEventListener> preListeners; private List<MetaStoreEventListener> listeners; private List<TransactionalMetaStoreEventListener> transactionalListeners; private List<MetaStoreEndFunctionListener> endFunctionListeners; private List<MetaStoreInitListener> initListeners; private MetaStoreFilterHook filterHook; private boolean isServerFilterEnabled = false; private Pattern partitionValidationPattern; private final boolean isInTest; @Override public List<TransactionalMetaStoreEventListener> getTransactionalListeners() { return transactionalListeners; } @Override public List<MetaStoreEventListener> getListeners() { return listeners; } @Override public void init() throws MetaException { initListeners = MetaStoreServerUtils.getMetaStoreListeners( MetaStoreInitListener.class, conf, MetastoreConf.getVar(conf, ConfVars.INIT_HOOKS)); for (MetaStoreInitListener singleInitListener: initListeners) { MetaStoreInitContext context = new MetaStoreInitContext(); singleInitListener.onInit(context); } String alterHandlerName = MetastoreConf.getVar(conf, ConfVars.ALTER_HANDLER); alterHandler = ReflectionUtils.newInstance(JavaUtils.getClass( alterHandlerName, AlterHandler.class), conf); wh = new Warehouse(conf); synchronized (HMSHandler.class) { if (currentUrl == null || !currentUrl.equals(MetaStoreInit.getConnectionURL(conf))) { createDefaultDB(); createDefaultRoles(); addAdminUsers(); currentUrl = MetaStoreInit.getConnectionURL(conf); } } //Start Metrics if (MetastoreConf.getBoolVar(conf, ConfVars.METRICS_ENABLED)) { LOG.info("Begin calculating metadata count metrics."); Metrics.initialize(conf); databaseCount = Metrics.getOrCreateGauge(MetricsConstants.TOTAL_DATABASES); tableCount = Metrics.getOrCreateGauge(MetricsConstants.TOTAL_TABLES); partCount = Metrics.getOrCreateGauge(MetricsConstants.TOTAL_PARTITIONS); updateMetrics(); } preListeners = MetaStoreServerUtils.getMetaStoreListeners(MetaStorePreEventListener.class, conf, MetastoreConf.getVar(conf, ConfVars.PRE_EVENT_LISTENERS)); preListeners.add(0, new TransactionalValidationListener(conf)); listeners = MetaStoreServerUtils.getMetaStoreListeners(MetaStoreEventListener.class, conf, MetastoreConf.getVar(conf, ConfVars.EVENT_LISTENERS)); listeners.add(new SessionPropertiesListener(conf)); transactionalListeners = new ArrayList() {{ add(new 
AcidEventListener(conf)); }}; transactionalListeners.addAll(MetaStoreServerUtils.getMetaStoreListeners( TransactionalMetaStoreEventListener.class, conf, MetastoreConf.getVar(conf, ConfVars.TRANSACTIONAL_EVENT_LISTENERS))); if (Metrics.getRegistry() != null) { listeners.add(new HMSMetricsListener(conf)); } boolean canCachedStoreCanUseEvent = false; for (MetaStoreEventListener listener : transactionalListeners) { if (listener.doesAddEventsToNotificationLogTable()) { canCachedStoreCanUseEvent = true; break; } } if (conf.getBoolean(ConfVars.METASTORE_CACHE_CAN_USE_EVENT.getVarname(), false) && !canCachedStoreCanUseEvent) { throw new MetaException("CachedStore cannot use events for invalidation as there is no " + "TransactionalMetaStoreEventListener to add events to the notification table"); } endFunctionListeners = MetaStoreServerUtils.getMetaStoreListeners( MetaStoreEndFunctionListener.class, conf, MetastoreConf.getVar(conf, ConfVars.END_FUNCTION_LISTENERS)); String partitionValidationRegex = MetastoreConf.getVar(conf, ConfVars.PARTITION_NAME_WHITELIST_PATTERN); if (partitionValidationRegex != null && !partitionValidationRegex.isEmpty()) { partitionValidationPattern = Pattern.compile(partitionValidationRegex); } // The tasks that need to run periodically are initialized only once. For a remote metastore // these threads are started along with the other housekeeping threads only in the leader // HMS. String leaderHost = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_HOUSEKEEPING_LEADER_HOSTNAME); if (!HiveMetaStore.isMetaStoreRemote() && ((leaderHost == null) || leaderHost.trim().isEmpty())) { startAlwaysTaskThreads(conf); } else if (!HiveMetaStore.isMetaStoreRemote()) { LOG.info("Not starting tasks specified by " + ConfVars.TASK_THREADS_ALWAYS.getVarname() + " since " + leaderHost + " is configured to run these tasks."); } expressionProxy = PartFilterExprUtil.createExpressionProxy(conf); fileMetadataManager = new FileMetadataManager(this.getMS(), conf); isServerFilterEnabled = getIfServerFilterenabled(); filterHook = isServerFilterEnabled ?
loadFilterHooks() : null; String className = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS); if (className != null && !className.trim().isEmpty()) { try { transformer = JavaUtils.newInstance(JavaUtils.getClass(className.trim(), IMetaStoreMetadataTransformer.class), new Class[] {IHMSHandler.class}, new Object[] {this}); } catch (Exception e) { LOG.error("Unable to create instance of class " + className, e); throw new IllegalArgumentException(e); } } dataconnectorFactory = DataConnectorProviderFactory.getInstance(this); } static void startAlwaysTaskThreads(Configuration conf) throws MetaException { if (alwaysThreadsInitialized.compareAndSet(false, true)) { ThreadPool.initialize(conf); Collection<String> taskNames = MetastoreConf.getStringCollection(conf, ConfVars.TASK_THREADS_ALWAYS); for (String taskName : taskNames) { MetastoreTaskThread task = JavaUtils.newInstance(JavaUtils.getClass(taskName, MetastoreTaskThread.class)); task.setConf(conf); long freq = task.runFrequency(TimeUnit.MILLISECONDS); LOG.info("Scheduling for " + task.getClass().getCanonicalName() + " service with " + "frequency " + freq + "ms."); // For backwards compatibility, since some threads used to be hard coded but only run if // frequency was > 0 if (freq > 0) { ThreadPool.getPool().scheduleAtFixedRate(task, freq, freq, TimeUnit.MILLISECONDS); } } } } /** * * Filter is actually enabled only when the configured filter hook is configured, not default, and * enabled in configuration * @return */ private boolean getIfServerFilterenabled() throws MetaException{ boolean isEnabled = MetastoreConf.getBoolVar(conf, ConfVars.METASTORE_SERVER_FILTER_ENABLED); if (!isEnabled) { LOG.info("HMS server filtering is disabled by configuration"); return false; } String filterHookClassName = MetastoreConf.getVar(conf, ConfVars.FILTER_HOOK); if (isBlank(filterHookClassName)) { throw new MetaException("HMS server filtering is enabled but no filter hook is configured"); } if (filterHookClassName.trim().equalsIgnoreCase(DefaultMetaStoreFilterHookImpl.class.getName())) { throw new MetaException("HMS server filtering is enabled but the filter hook is DefaultMetaStoreFilterHookImpl, which does no filtering"); } LOG.info("HMS server filtering is enabled. The filter class is " + filterHookClassName); return true; } private MetaStoreFilterHook loadFilterHooks() throws IllegalStateException { String errorMsg = "Unable to load filter hook at HMS server. "; String filterHookClassName = MetastoreConf.getVar(conf, ConfVars.FILTER_HOOK); Preconditions.checkState(!isBlank(filterHookClassName)); try { return (MetaStoreFilterHook)Class.forName( filterHookClassName.trim(), true, JavaUtils.getClassLoader()).getConstructor( Configuration.class).newInstance(conf); } catch (Exception e) { LOG.error(errorMsg, e); throw new IllegalStateException(errorMsg + e.getMessage(), e); } } /** * Check if user can access the table associated with the partition. If not, then throw exception * so user cannot access partitions associated with this table * We are not calling Pre event listener for authorization because it requires getting the * table object from DB, more overhead. Instead ,we call filter hook to filter out table if user * has no access. Filter hook only requires table name, not table object. 
That saves DB access for * table object, and still achieve the same purpose: checking if user can access the specified * table * * @param catName catalog name of the table * @param dbName database name of the table * @param tblName table name * @throws NoSuchObjectException * @throws MetaException */ private void authorizeTableForPartitionMetadata( final String catName, final String dbName, final String tblName) throws NoSuchObjectException, MetaException { FilterUtils.checkDbAndTableFilters( isServerFilterEnabled, filterHook, catName, dbName, tblName); } private static String addPrefix(String s) { return threadLocalId.get() + ": " + s; } /** * Set copy of invoking HMSHandler on thread local */ private static void setHMSHandler(HMSHandler handler) { if (threadLocalHMSHandler.get() == null) { threadLocalHMSHandler.set(handler); } } @Override public void setConf(Configuration conf) { threadLocalConf.set(conf); RawStore ms = threadLocalMS.get(); if (ms != null) { ms.setConf(conf); // reload if DS related configuration is changed } } @Override public Configuration getConf() { Configuration conf = threadLocalConf.get(); if (conf == null) { conf = new Configuration(this.conf); threadLocalConf.set(conf); } return conf; } @Override public Warehouse getWh() { return wh; } @Override public void setMetaConf(String key, String value) throws MetaException { ConfVars confVar = MetastoreConf.getMetaConf(key); if (confVar == null) { throw new MetaException("Invalid configuration key " + key); } try { confVar.validate(value); } catch (IllegalArgumentException e) { throw new MetaException("Invalid configuration value " + value + " for key " + key + " by " + e.getMessage()); } Configuration configuration = getConf(); String oldValue = MetastoreConf.get(configuration, key); // Save prev val of the key on threadLocal Map<String, String> modifiedConf = threadLocalModifiedConfig.get(); if (!modifiedConf.containsKey(key)) { modifiedConf.put(key, oldValue); } // Set invoking HMSHandler on threadLocal, this will be used later to notify // metaListeners in HiveMetaStore#cleanupRawStore setHMSHandler(this); configuration.set(key, value); notifyMetaListeners(key, oldValue, value); if (ConfVars.TRY_DIRECT_SQL == confVar) { HMSHandler.LOG.info("Direct SQL optimization = {}", value); } } @Override public String getMetaConf(String key) throws MetaException { ConfVars confVar = MetastoreConf.getMetaConf(key); if (confVar == null) { throw new MetaException("Invalid configuration key " + key); } return getConf().get(key, confVar.getDefaultVal().toString()); } /** * Get a cached RawStore. 
* * @return the cached RawStore * @throws MetaException */ @Override public RawStore getMS() throws MetaException { Configuration conf = getConf(); return getMSForConf(conf); } public static RawStore getMSForConf(Configuration conf) throws MetaException { RawStore ms = threadLocalMS.get(); if (ms == null) { ms = newRawStoreForConf(conf); try { ms.verifySchema(); } catch (MetaException e) { ms.shutdown(); throw e; } threadLocalMS.set(ms); ms = threadLocalMS.get(); LOG.info("Created RawStore: " + ms + " from thread id: " + Thread.currentThread().getId()); } return ms; } @Override public TxnStore getTxnHandler() { return getMsThreadTxnHandler(conf); } public static TxnStore getMsThreadTxnHandler(Configuration conf) { TxnStore txn = threadLocalTxn.get(); if (txn == null) { txn = TxnUtils.getTxnStore(conf); threadLocalTxn.set(txn); } return txn; } static RawStore newRawStoreForConf(Configuration conf) throws MetaException { Configuration newConf = new Configuration(conf); String rawStoreClassName = MetastoreConf.getVar(newConf, ConfVars.RAW_STORE_IMPL); LOG.info(addPrefix("Opening raw store with implementation class:" + rawStoreClassName)); return RawStoreProxy.getProxy(newConf, conf, rawStoreClassName, threadLocalId.get()); } @VisibleForTesting public static void createDefaultCatalog(RawStore ms, Warehouse wh) throws MetaException, InvalidOperationException { try { Catalog defaultCat = ms.getCatalog(DEFAULT_CATALOG_NAME); // Null check because in some test cases we get a null from ms.getCatalog. if (defaultCat !=null && defaultCat.getLocationUri().equals("TBD")) { // One time update issue. When the new 'hive' catalog is created in an upgrade the // script does not know the location of the warehouse. So we need to update it. LOG.info("Setting location of default catalog, as it hasn't been done after upgrade"); defaultCat.setLocationUri(wh.getWhRoot().toString()); ms.alterCatalog(defaultCat.getName(), defaultCat); } } catch (NoSuchObjectException e) { Catalog cat = new Catalog(DEFAULT_CATALOG_NAME, wh.getWhRoot().toString()); long time = System.currentTimeMillis() / 1000; cat.setCreateTime((int) time); cat.setDescription(Warehouse.DEFAULT_CATALOG_COMMENT); ms.createCatalog(cat); } } private void createDefaultDB_core(RawStore ms) throws MetaException, InvalidObjectException { try { ms.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME); } catch (NoSuchObjectException e) { LOG.info("Started creating a default database with name: "+DEFAULT_DATABASE_NAME); Database db = new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT, wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME, true).toString(), null); db.setOwnerName(PUBLIC); db.setOwnerType(PrincipalType.ROLE); db.setCatalogName(DEFAULT_CATALOG_NAME); long time = System.currentTimeMillis() / 1000; db.setCreateTime((int) time); db.setType(DatabaseType.NATIVE); ms.createDatabase(db); LOG.info("Successfully created a default database with name: "+DEFAULT_DATABASE_NAME); } } /** * create default database if it doesn't exist. * * This is a potential contention when HiveServer2 using embedded metastore and Metastore * Server try to concurrently invoke createDefaultDB. If one failed, JDOException was caught * for one more time try, if failed again, simply ignored by warning, which meant another * succeeds. 
* * @throws MetaException */ private void createDefaultDB() throws MetaException { try { RawStore ms = getMS(); createDefaultCatalog(ms, wh); createDefaultDB_core(ms); } catch (JDOException e) { LOG.warn("Retrying creating default database after error: " + e.getMessage(), e); try { RawStore ms = getMS(); createDefaultCatalog(ms, wh); createDefaultDB_core(ms); } catch (InvalidObjectException | InvalidOperationException e1) { throw new MetaException(e1.getMessage()); } } catch (InvalidObjectException|InvalidOperationException e) { throw new MetaException(e.getMessage()); } } /** * create default roles if they don't exist. * * This is a potential contention when HiveServer2 using embedded metastore and Metastore * Server try to concurrently invoke createDefaultRoles. If one failed, JDOException was caught * for one more time try, if failed again, simply ignored by warning, which meant another * succeeds. * * @throws MetaException */ private void createDefaultRoles() throws MetaException { try { createDefaultRoles_core(); } catch (JDOException e) { LOG.warn("Retrying creating default roles after error: " + e.getMessage(), e); createDefaultRoles_core(); } } private void createDefaultRoles_core() throws MetaException { RawStore ms = getMS(); try { ms.addRole(ADMIN, ADMIN); } catch (InvalidObjectException e) { LOG.debug(ADMIN +" role already exists",e); } catch (NoSuchObjectException e) { // This should never be thrown. LOG.warn("Unexpected exception while adding " +ADMIN+" roles" , e); } LOG.info("Added "+ ADMIN+ " role in metastore"); try { ms.addRole(PUBLIC, PUBLIC); } catch (InvalidObjectException e) { LOG.debug(PUBLIC + " role already exists",e); } catch (NoSuchObjectException e) { // This should never be thrown. LOG.warn("Unexpected exception while adding "+PUBLIC +" roles" , e); } LOG.info("Added "+PUBLIC+ " role in metastore"); // now grant all privs to admin PrivilegeBag privs = new PrivilegeBag(); privs.addToPrivileges(new HiveObjectPrivilege( new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null), ADMIN, PrincipalType.ROLE, new PrivilegeGrantInfo("All", 0, ADMIN, PrincipalType.ROLE, true), "SQL")); try { ms.grantPrivileges(privs); } catch (InvalidObjectException e) { // Surprisingly these privs are already granted. LOG.debug("Failed while granting global privs to admin", e); } catch (NoSuchObjectException e) { // Unlikely to be thrown. LOG.warn("Failed while granting global privs to admin", e); } } /** * add admin users if they don't exist. * * This is a potential contention when HiveServer2 using embedded metastore and Metastore * Server try to concurrently invoke addAdminUsers. If one failed, JDOException was caught for * one more time try, if failed again, simply ignored by warning, which meant another succeeds. * * @throws MetaException */ private void addAdminUsers() throws MetaException { try { addAdminUsers_core(); } catch (JDOException e) { LOG.warn("Retrying adding admin users after error: " + e.getMessage(), e); addAdminUsers_core(); } } private void addAdminUsers_core() throws MetaException { // now add pre-configured users to admin role String userStr = MetastoreConf.getVar(conf,ConfVars.USERS_IN_ADMIN_ROLE,"").trim(); if (userStr.isEmpty()) { LOG.info("No user is added in admin role, since config is empty"); return; } // Since user names need to be valid unix user names, per IEEE Std 1003.1-2001 they cannot // contain comma, so we can safely split above string on comma. 
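// For example (an illustrative sketch, not in the original source):
//   Splitter.on(",").trimResults().omitEmptyStrings().split(" hive, admin ,,bob ")
// yields ["hive", "admin", "bob"] -- each piece is trimmed first, then empty entries are dropped.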
Iterator<String> users = Splitter.on(",").trimResults().omitEmptyStrings().split(userStr).iterator(); if (!users.hasNext()) { LOG.info("No user is added in admin role, since config value " + userStr + " is in an incorrect format. We accept a comma-separated list of users."); return; } Role adminRole; RawStore ms = getMS(); try { adminRole = ms.getRole(ADMIN); } catch (NoSuchObjectException e) { LOG.error("Failed to retrieve just added admin role",e); return; } while (users.hasNext()) { String userName = users.next(); try { ms.grantRole(adminRole, userName, PrincipalType.USER, ADMIN, PrincipalType.ROLE, true); LOG.info("Added " + userName + " to admin role"); } catch (NoSuchObjectException e) { LOG.error("Failed to add " + userName + " to admin role", e); } catch (InvalidObjectException e) { LOG.debug(userName + " already in admin role", e); } } } private static void logAndAudit(final String m) { LOG.debug("{}: {}", threadLocalId.get(), m); logAuditEvent(m); } private String startFunction(String function, String extraLogInfo) { incrementCounter(function); logAndAudit((getThreadLocalIpAddress() == null ? "" : "source:" + getThreadLocalIpAddress() + " ") + function + extraLogInfo); com.codahale.metrics.Timer timer = Metrics.getOrCreateTimer(MetricsConstants.API_PREFIX + function); if (timer != null) { // Timer will be null if we aren't using metrics timerContexts.get().put(function, timer.time()); } Counter counter = Metrics.getOrCreateCounter(MetricsConstants.ACTIVE_CALLS + function); if (counter != null) { counter.inc(); } return function; } private String startFunction(String function) { return startFunction(function, ""); } private void startTableFunction(String function, String catName, String db, String tbl) { startFunction(function, " : tbl=" + TableName.getQualified(catName, db, tbl)); } private void startMultiTableFunction(String function, String db, List<String> tbls) { String tableNames = join(tbls, ","); startFunction(function, " : db=" + db + " tbls=" + tableNames); } private void startPartitionFunction(String function, String cat, String db, String tbl, List<String> partVals) { startFunction(function, " : tbl=" + TableName.getQualified(cat, db, tbl) + "[" + join(partVals, ",") + "]"); } private void startPartitionFunction(String function, String catName, String db, String tbl, Map<String, String> partName) { startFunction(function, " : tbl=" + TableName.getQualified(catName, db, tbl) + " partition=" + partName); } private void endFunction(String function, boolean successful, Exception e) { endFunction(function, successful, e, null); } private void endFunction(String function, boolean successful, Exception e, String inputTableName) { endFunction(function, new MetaStoreEndFunctionContext(successful, e, inputTableName)); } private void endFunction(String function, MetaStoreEndFunctionContext context) { com.codahale.metrics.Timer.Context timerContext = timerContexts.get().remove(function); if (timerContext != null) { long timeTaken = timerContext.stop(); LOG.debug((getThreadLocalIpAddress() == null ?
"" : "source:" + getThreadLocalIpAddress() + " ") + function + "time taken(ns): " + timeTaken); } Counter counter = Metrics.getOrCreateCounter(MetricsConstants.ACTIVE_CALLS + function); if (counter != null) { counter.dec(); } for (MetaStoreEndFunctionListener listener : endFunctionListeners) { listener.onEndFunction(function, context); } } @Override public fb_status getStatus() { return fb_status.ALIVE; } @Override public void shutdown() { cleanupRawStore(); PerfLogger.getPerfLogger(false).cleanupPerfLogMetrics(); ThreadPool.shutdown(); } @Override public AbstractMap<String, Long> getCounters() { AbstractMap<String, Long> counters = super.getCounters(); // Allow endFunctionListeners to add any counters they have collected if (endFunctionListeners != null) { for (MetaStoreEndFunctionListener listener : endFunctionListeners) { listener.exportCounters(counters); } } return counters; } @Override public void create_catalog(CreateCatalogRequest rqst) throws AlreadyExistsException, InvalidObjectException, MetaException { Catalog catalog = rqst.getCatalog(); startFunction("create_catalog", ": " + catalog.toString()); boolean success = false; Exception ex = null; try { try { getMS().getCatalog(catalog.getName()); throw new AlreadyExistsException("Catalog " + catalog.getName() + " already exists"); } catch (NoSuchObjectException e) { // expected } if (!MetaStoreUtils.validateName(catalog.getName(), null)) { throw new InvalidObjectException(catalog.getName() + " is not a valid catalog name"); } if (catalog.getLocationUri() == null) { throw new InvalidObjectException("You must specify a path for the catalog"); } RawStore ms = getMS(); Path catPath = new Path(catalog.getLocationUri()); boolean madeDir = false; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { firePreEvent(new PreCreateCatalogEvent(this, catalog)); if (!wh.isDir(catPath)) { if (!wh.mkdirs(catPath)) { throw new MetaException("Unable to create catalog path " + catPath + ", failed to create catalog " + catalog.getName()); } madeDir = true; } // set the create time of catalog long time = System.currentTimeMillis() / 1000; catalog.setCreateTime((int) time); ms.openTransaction(); ms.createCatalog(catalog); // Create a default database inside the catalog Database db = new Database(DEFAULT_DATABASE_NAME, "Default database for catalog " + catalog.getName(), catalog.getLocationUri(), Collections.emptyMap()); db.setCatalogName(catalog.getName()); create_database_core(ms, db); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_CATALOG, new CreateCatalogEvent(true, this, catalog)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); if (madeDir) { wh.deleteDir(catPath, true, false, false); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_CATALOG, new CreateCatalogEvent(success, this, catalog), null, transactionalListenersResponses, ms); } } success = true; } catch (AlreadyExistsException|InvalidObjectException|MetaException e) { ex = e; throw e; } finally { endFunction("create_catalog", success, ex); } } @Override public void alter_catalog(AlterCatalogRequest rqst) throws TException { startFunction("alter_catalog " + rqst.getName()); boolean success = false; Exception ex = null; RawStore ms = getMS(); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); GetCatalogResponse oldCat = null; try { oldCat = 
get_catalog(new GetCatalogRequest(rqst.getName())); // Above should have thrown NoSuchObjectException if there is no such catalog assert oldCat != null && oldCat.getCatalog() != null; firePreEvent(new PreAlterCatalogEvent(oldCat.getCatalog(), rqst.getNewCat(), this)); ms.openTransaction(); ms.alterCatalog(rqst.getName(), rqst.getNewCat()); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_CATALOG, new AlterCatalogEvent(oldCat.getCatalog(), rqst.getNewCat(), true, this)); } success = ms.commitTransaction(); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } if ((null != oldCat) && (!listeners.isEmpty())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_CATALOG, new AlterCatalogEvent(oldCat.getCatalog(), rqst.getNewCat(), success, this), null, transactionalListenersResponses, ms); } endFunction("alter_catalog", success, ex); } } @Override public GetCatalogResponse get_catalog(GetCatalogRequest rqst) throws NoSuchObjectException, TException { String catName = rqst.getName(); startFunction("get_catalog", ": " + catName); Catalog cat = null; Exception ex = null; try { cat = getMS().getCatalog(catName); firePreEvent(new PreReadCatalogEvent(this, cat)); return new GetCatalogResponse(cat); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { endFunction("get_catalog", cat != null, ex); } } @Override public GetCatalogsResponse get_catalogs() throws MetaException { startFunction("get_catalogs"); List<String> ret = null; Exception ex = null; try { ret = getMS().getCatalogs(); } catch (Exception e) { ex = e; throw e; } finally { endFunction("get_catalogs", ret != null, ex); } return new GetCatalogsResponse(ret == null ?
Collections.emptyList() : ret); } @Override public void drop_catalog(DropCatalogRequest rqst) throws NoSuchObjectException, InvalidOperationException, MetaException { String catName = rqst.getName(); startFunction("drop_catalog", ": " + catName); if (DEFAULT_CATALOG_NAME.equalsIgnoreCase(catName)) { endFunction("drop_catalog", false, null); throw new MetaException("Can not drop " + DEFAULT_CATALOG_NAME + " catalog"); } boolean success = false; Exception ex = null; try { dropCatalogCore(catName); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(NoSuchObjectException.class, InvalidOperationException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("drop_catalog", success, ex); } } private void dropCatalogCore(String catName) throws MetaException, NoSuchObjectException, InvalidOperationException { boolean success = false; Catalog cat = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); RawStore ms = getMS(); try { ms.openTransaction(); cat = ms.getCatalog(catName); firePreEvent(new PreDropCatalogEvent(this, cat)); List<String> allDbs = get_databases(prependNotNullCatToDbName(catName, null)); if (allDbs != null && !allDbs.isEmpty()) { // It might just be the default, in which case we can drop that one if it's empty if (allDbs.size() == 1 && allDbs.get(0).equals(DEFAULT_DATABASE_NAME)) { try { drop_database_core(ms, catName, DEFAULT_DATABASE_NAME, true, false); } catch (InvalidOperationException e) { // This means there are tables of something in the database throw new InvalidOperationException("There are still objects in the default " + "database for catalog " + catName); } catch (InvalidObjectException|IOException|InvalidInputException e) { MetaException me = new MetaException("Error attempt to drop default database for " + "catalog " + catName); me.initCause(e); throw me; } } else { throw new InvalidOperationException("There are non-default databases in the catalog " + catName + " so it cannot be dropped."); } } ms.dropCatalog(catName) ; if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_CATALOG, new DropCatalogEvent(true, this, cat)); } success = ms.commitTransaction(); } finally { if (success) { wh.deleteDir(wh.getDnsPath(new Path(cat.getLocationUri())), false, false, false); } else { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_CATALOG, new DropCatalogEvent(success, this, cat), null, transactionalListenerResponses, ms); } } } static boolean isDbReplicationTarget(Database db) { if (db.getParameters() == null) { return false; } if (!db.getParameters().containsKey(ReplConst.REPL_TARGET_DB_PROPERTY)) { return false; } return !db.getParameters().get(ReplConst.REPL_TARGET_DB_PROPERTY).trim().isEmpty(); } // Assumes that the catalog has already been set. 
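// Illustrative caller-side sketch (hypothetical, not part of the original source): before the
// implementation below, note that a client builds a Database much like createDefaultDB_core()
// above and lets create_database() fill in the defaults:
//
//   Database db = new Database("sales", "example database", null /* locationUri */, null);
//   db.setOwnerName("hive");
//   db.setOwnerType(PrincipalType.USER);
//   handler.create_database(db); // catalog defaults to getDefaultCatalog(conf) when unset,
//                                // and the location is derived from the warehouse root when
//                                // locationUri is null (see create_database_core below)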
private void create_database_core(RawStore ms, final Database db) throws AlreadyExistsException, InvalidObjectException, MetaException { if (!MetaStoreUtils.validateName(db.getName(), conf)) { throw new InvalidObjectException(db.getName() + " is not a valid database name"); } Catalog cat = null; try { cat = getMS().getCatalog(db.getCatalogName()); } catch (NoSuchObjectException e) { LOG.error("No such catalog " + db.getCatalogName()); throw new InvalidObjectException("No such catalog " + db.getCatalogName()); } boolean skipAuthorization = false; String passedInURI = db.getLocationUri(); String passedInManagedURI = db.getManagedLocationUri(); if (passedInURI == null && passedInManagedURI == null) { skipAuthorization = true; } final Path defaultDbExtPath = wh.getDefaultDatabasePath(db.getName(), true); final Path defaultDbMgdPath = wh.getDefaultDatabasePath(db.getName(), false); final Path dbExtPath = (passedInURI != null) ? wh.getDnsPath(new Path(passedInURI)) : wh.determineDatabasePath(cat, db); final Path dbMgdPath = (passedInManagedURI != null) ? wh.getDnsPath(new Path(passedInManagedURI)) : null; if ((defaultDbExtPath.equals(dbExtPath) && defaultDbMgdPath.equals(dbMgdPath)) && ((dbMgdPath == null) || dbMgdPath.equals(defaultDbMgdPath))) { skipAuthorization = true; } if ( skipAuthorization ) { //null out to skip authorizer URI check db.setLocationUri(null); db.setManagedLocationUri(null); }else{ db.setLocationUri(dbExtPath.toString()); if (dbMgdPath != null) { db.setManagedLocationUri(dbMgdPath.toString()); } } if (db.getOwnerName() == null){ try { db.setOwnerName(SecurityUtils.getUGI().getShortUserName()); }catch (Exception e){ LOG.warn("Failed to get owner name for create database operation.", e); } } long time = System.currentTimeMillis()/1000; db.setCreateTime((int) time); boolean success = false; boolean madeManagedDir = false; boolean madeExternalDir = false; boolean isReplicated = isDbReplicationTarget(db); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { firePreEvent(new PreCreateDatabaseEvent(db, this)); //reinstate location uri for metastore db. if (skipAuthorization == true){ db.setLocationUri(dbExtPath.toString()); if (dbMgdPath != null) { db.setManagedLocationUri(dbMgdPath.toString()); } } if (db.getCatalogName() != null && !db.getCatalogName(). equals(Warehouse.DEFAULT_CATALOG_NAME)) { if (!wh.isDir(dbExtPath)) { LOG.debug("Creating database path " + dbExtPath); if (!wh.mkdirs(dbExtPath)) { throw new MetaException("Unable to create database path " + dbExtPath + ", failed to create database " + db.getName()); } madeExternalDir = true; } } else { if (dbMgdPath != null) { try { // Since this may be done as random user (if doAs=true) he may not have access // to the managed directory. 
We run this as an admin user madeManagedDir = UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws MetaException { if (!wh.isDir(dbMgdPath)) { LOG.info("Creating database path in managed directory " + dbMgdPath); if (!wh.mkdirs(dbMgdPath)) { throw new MetaException("Unable to create database managed path " + dbMgdPath + ", failed to create database " + db.getName()); } return true; } return false; } }); if (madeManagedDir) { LOG.info("Created database path in managed directory " + dbMgdPath); } else if (!isInTest || !isDbReplicationTarget(db)) { // Hive replication tests doesn't drop the db after each test throw new MetaException( "Unable to create database managed directory " + dbMgdPath + ", failed to create database " + db.getName()); } } catch (IOException | InterruptedException e) { throw new MetaException( "Unable to create database managed directory " + dbMgdPath + ", failed to create database " + db.getName() + ":" + e.getMessage()); } } if (dbExtPath != null) { try { madeExternalDir = UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws MetaException { if (!wh.isDir(dbExtPath)) { LOG.info("Creating database path in external directory " + dbExtPath); return wh.mkdirs(dbExtPath); } return false; } }); if (madeExternalDir) { LOG.info("Created database path in external directory " + dbExtPath); } else { LOG.warn("Failed to create external path " + dbExtPath + " for database " + db.getName() + ". This may result in access not being allowed if the " + "StorageBasedAuthorizationProvider is enabled"); } } catch (IOException | InterruptedException | UndeclaredThrowableException e) { throw new MetaException("Failed to create external path " + dbExtPath + " for database " + db.getName() + ". This may result in access not being allowed if the " + "StorageBasedAuthorizationProvider is enabled: " + e.getMessage()); } } else { LOG.info("Database external path won't be created since the external warehouse directory is not defined"); } } ms.openTransaction(); ms.createDatabase(db); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_DATABASE, new CreateDatabaseEvent(db, true, this, isReplicated)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); if (db.getCatalogName() != null && !db.getCatalogName(). 
equals(Warehouse.DEFAULT_CATALOG_NAME)) { if (madeManagedDir && dbMgdPath != null) { wh.deleteDir(dbMgdPath, true, db); } } else { if (madeManagedDir && dbMgdPath != null) { try { UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { wh.deleteDir(dbMgdPath, true, db); return null; } }); } catch (IOException | InterruptedException e) { LOG.error( "Couldn't delete managed directory " + dbMgdPath + " after " + "it was created for database " + db.getName() + " " + e.getMessage()); } } if (madeExternalDir && dbExtPath != null) { try { UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { wh.deleteDir(dbExtPath, true, db); return null; } }); } catch (IOException | InterruptedException e) { LOG.error("Couldn't delete external directory " + dbExtPath + " after " + "it was created for database " + db.getName() + " " + e.getMessage()); } } } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_DATABASE, new CreateDatabaseEvent(db, success, this, isReplicated), null, transactionalListenersResponses, ms); } } } @Override public void create_database(final Database db) throws AlreadyExistsException, InvalidObjectException, MetaException { startFunction("create_database", ": " + db.toString()); boolean success = false; Exception ex = null; if (!db.isSetCatalogName()) { db.setCatalogName(getDefaultCatalog(conf)); } try { try { if (null != get_database_core(db.getCatalogName(), db.getName())) { throw new AlreadyExistsException("Database " + db.getName() + " already exists"); } } catch (NoSuchObjectException e) { // expected } if (testTimeoutEnabled) { try { Thread.sleep(testTimeoutValue); } catch (InterruptedException e) { // do nothing } Deadline.checkTimeout(); } create_database_core(getMS(), db); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("create_database", success, ex); } } @Override public Database get_database(final String name) throws NoSuchObjectException, MetaException { GetDatabaseRequest request = new GetDatabaseRequest(); String[] parsedDbName = parseDbName(name, conf); request.setName(parsedDbName[DB_NAME]); if (parsedDbName[CAT_NAME] != null) { request.setCatalogName(parsedDbName[CAT_NAME]); } return get_database_req(request); } @Override public Database get_database_core(String catName, final String name) throws NoSuchObjectException, MetaException { Database db = null; if (name == null) { throw new MetaException("Database name cannot be null."); } try { db = getMS().getDatabase(catName, name); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class) .defaultRuntimeException(); } return db; } @Override public Database get_database_req(GetDatabaseRequest request) throws NoSuchObjectException, MetaException { startFunction("get_database", ": " + request.getName()); Database db = null; Exception ex = null; if (request.getName() == null) { throw new MetaException("Database name cannot be null."); } List<String> processorCapabilities = request.getProcessorCapabilities(); String processorId = request.getProcessorIdentifier(); try { db = getMS().getDatabase(request.getCatalogName(), request.getName()); firePreEvent(new PreReadDatabaseEvent(db, this)); if (transformer != null) { 
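// Optional metadata transformer: when a transformer class is configured via
// ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS (instantiated in init() above), it may
// rewrite the Database returned to the caller based on the processor capabilities and
// processor identifier declared in the request.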
db = transformer.transformDatabase(db, processorCapabilities, processorId); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class) .defaultRuntimeException(); } finally { endFunction("get_database", db != null, ex); } return db; } @Override public void alter_database(final String dbName, final Database newDB) throws TException { startFunction("alter_database " + dbName); boolean success = false; Exception ex = null; RawStore ms = getMS(); Database oldDB = null; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); // Perform the same URI normalization as create_database_core. if (newDB.getLocationUri() != null) { newDB.setLocationUri(wh.getDnsPath(new Path(newDB.getLocationUri())).toString()); } String[] parsedDbName = parseDbName(dbName, conf); // We can replicate into an empty database, in which case newDB will have indication that // it's target of replication but not oldDB. But replication flow will never alter a // database so that oldDB indicates that it's target or replication but not the newDB. So, // relying solely on newDB to check whether the database is target of replication works. boolean isReplicated = isDbReplicationTarget(newDB); try { oldDB = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); if (oldDB == null) { throw new MetaException("Could not alter database \"" + parsedDbName[DB_NAME] + "\". Could not retrieve old definition."); } firePreEvent(new PreAlterDatabaseEvent(oldDB, newDB, this)); ms.openTransaction(); ms.alterDatabase(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], newDB); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_DATABASE, new AlterDatabaseEvent(oldDB, newDB, true, this, isReplicated)); } success = ms.commitTransaction(); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } if ((null != oldDB) && (!listeners.isEmpty())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_DATABASE, new AlterDatabaseEvent(oldDB, newDB, success, this, isReplicated), null, transactionalListenersResponses, ms); } endFunction("alter_database", success, ex); } } private void drop_database_core(RawStore ms, String catName, final String name, final boolean deleteData, final boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException, IOException, InvalidObjectException, InvalidInputException { boolean success = false; Database db = null; List<Path> tablePaths = new ArrayList<>(); List<Path> partitionPaths = new ArrayList<>(); Map<String, String> transactionalListenerResponses = Collections.emptyMap(); if (name == null) { throw new MetaException("Database name cannot be null."); } boolean isReplicated = false; try { ms.openTransaction(); db = ms.getDatabase(catName, name); if (db.getType() == DatabaseType.REMOTE) { success = drop_remote_database_core(ms, db); return; } isReplicated = isDbReplicationTarget(db); if (!isInTest && ReplChangeManager.isSourceOfReplication(db)) { throw new InvalidOperationException("can not drop a database which is a source of replication"); } firePreEvent(new PreDropDatabaseEvent(db, this)); String catPrependedName = MetaStoreUtils.prependCatalogToDbName(catName, name, conf); Set<String> uniqueTableNames = new HashSet<>(get_all_tables(catPrependedName)); List<String> allFunctions = get_functions(catPrependedName, "*"); 
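// Note (assumed naming convention, for illustration): catPrependedName is the
// catalog-qualified database name produced by MetaStoreUtils.prependCatalogToDbName,
// conventionally of the form "@<catalog>#<database>" (e.g. "@hive#sales"), which
// get_all_tables()/get_functions() parse back apart via parseDbName().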
ListStoredProcedureRequest request = new ListStoredProcedureRequest(catName); request.setDbName(name); List<String> allProcedures = get_all_stored_procedures(request); ListPackageRequest pkgRequest = new ListPackageRequest(catName); pkgRequest.setDbName(name); List<String> allPackages = get_all_packages(pkgRequest); if (!cascade) { if (!uniqueTableNames.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more tables exist."); } if (!allFunctions.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more functions exist."); } if (!allProcedures.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more stored procedures exist."); } if (!allPackages.isEmpty()) { throw new InvalidOperationException( "Database " + db.getName() + " is not empty. One or more packages exist."); } } Path path = new Path(db.getLocationUri()).getParent(); if (!wh.isWritable(path)) { throw new MetaException("Database not dropped since its external warehouse location " + path + " is not writable by " + SecurityUtils.getUser()); } path = wh.getDatabaseManagedPath(db).getParent(); if (!wh.isWritable(path)) { throw new MetaException("Database not dropped since its managed warehouse location " + path + " is not writable by " + SecurityUtils.getUser()); } Path databasePath = wh.getDnsPath(wh.getDatabasePath(db)); // drop any functions before dropping db for (String funcName : allFunctions) { drop_function(catPrependedName, funcName); } for (String procName : allProcedures) { drop_stored_procedure(new StoredProcedureRequest(catName, name, procName)); } for (String pkgName : allPackages) { drop_package(new DropPackageRequest(catName, name, pkgName)); } final int tableBatchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_MAX); // First pass will drop the materialized views List<String> materializedViewNames = getTablesByTypeCore(catName, name, ".*", TableType.MATERIALIZED_VIEW.toString()); int startIndex = 0; // retrieve the tables from the metastore in batches to alleviate memory constraints while (startIndex < materializedViewNames.size()) { int endIndex = Math.min(startIndex + tableBatchSize, materializedViewNames.size()); List<Table> materializedViews; try { materializedViews = ms.getTableObjectsByName(catName, name, materializedViewNames.subList(startIndex, endIndex)); } catch (UnknownDBException e) { throw new MetaException(e.getMessage()); } if (materializedViews != null && !materializedViews.isEmpty()) { for (Table materializedView : materializedViews) { if (materializedView.getSd().getLocation() != null) { Path materializedViewPath = wh.getDnsPath(new Path(materializedView.getSd().getLocation())); if (!FileUtils.isSubdirectory(databasePath.toString(), materializedViewPath.toString())) { if (!wh.isWritable(materializedViewPath.getParent())) { throw new MetaException("Database metadata not deleted since table: " + materializedView.getTableName() + " has a parent location " + materializedViewPath.getParent() + " which is not writable by " + SecurityUtils.getUser()); } tablePaths.add(materializedViewPath); } } // Drop the materialized view but not its data drop_table(name, materializedView.getTableName(), false); // Remove from all tables uniqueTableNames.remove(materializedView.getTableName()); } } startIndex = endIndex; } // drop tables before dropping db List<String> allTables = new ArrayList<>(uniqueTableNames); startIndex = 0; // retrieve the tables from the 
// metastore in batches to alleviate memory constraints
while (startIndex < allTables.size()) {
  int endIndex = Math.min(startIndex + tableBatchSize, allTables.size());
  List<Table> tables;
  try {
    tables = ms.getTableObjectsByName(catName, name, allTables.subList(startIndex, endIndex));
  } catch (UnknownDBException e) {
    throw new MetaException(e.getMessage());
  }

  if (tables != null && !tables.isEmpty()) {
    for (Table table : tables) {
      // If the table is not external and it might not be in a subdirectory of the database
      // add its locations to the list of paths to delete
      Path tablePath = null;
      boolean tableDataShouldBeDeleted = checkTableDataShouldBeDeleted(table, deleteData);
      boolean isManagedTable = table.getTableType().equals(TableType.MANAGED_TABLE.toString());
      if (table.getSd().getLocation() != null && tableDataShouldBeDeleted) {
        tablePath = wh.getDnsPath(new Path(table.getSd().getLocation()));
        if (!isManagedTable) {
          if (!wh.isWritable(tablePath.getParent())) {
            throw new MetaException(
                "Database metadata not deleted since table: " + table.getTableName()
                    + " has a parent location " + tablePath.getParent()
                    + " which is not writable by " + SecurityUtils.getUser());
          }
          tablePaths.add(tablePath);
        }
      }

      // For each partition in each table, drop the partitions and get a list of
      // partitions' locations which might need to be deleted
      partitionPaths = dropPartitionsAndGetLocations(ms, catName, name,
          table.getTableName(), tablePath, tableDataShouldBeDeleted);

      // Drop the table but not its data
      drop_table_with_environment_context(
          MetaStoreUtils.prependCatalogToDbName(table.getCatName(), table.getDbName(), conf),
          table.getTableName(), false, null, false);
    }
  }
  startIndex = endIndex;
}

if (ms.dropDatabase(catName, name)) {
  if (!transactionalListeners.isEmpty()) {
    transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
        EventType.DROP_DATABASE, new DropDatabaseEvent(db, true, this, isReplicated));
  }
  success = ms.commitTransaction();
}
} finally {
  if (!success) {
    ms.rollbackTransaction();
  } else if (deleteData) {
    // Delete the data in the partitions which have other locations
    deletePartitionData(partitionPaths, false, db);
    // Delete the data in the tables which have other locations
    for (Path tablePath : tablePaths) {
      deleteTableData(tablePath, false, db);
    }
    final Database dbFinal = db;
    final Path path = (dbFinal.getManagedLocationUri() != null) ?
new Path(dbFinal.getManagedLocationUri()) : wh.getDatabaseManagedPath(dbFinal); try { Boolean deleted = UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws IOException, MetaException { return wh.deleteDir(path, true, dbFinal); } }); if (!deleted) { LOG.error("Failed to delete database's managed warehouse directory: " + path); } } catch (Exception e) { LOG.error("Failed to delete database's managed warehouse directory: " + path + " " + e.getMessage()); } try { Boolean deleted = UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() throws MetaException { return wh.deleteDir(new Path(dbFinal.getLocationUri()), true, dbFinal); } }); if (!deleted) { LOG.error("Failed to delete database external warehouse directory " + db.getLocationUri()); } } catch (IOException | InterruptedException | UndeclaredThrowableException e) { LOG.error("Failed to delete the database external warehouse directory: " + db.getLocationUri() + " " + e .getMessage()); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_DATABASE, new DropDatabaseEvent(db, success, this, isReplicated), null, transactionalListenerResponses, ms); } } } private boolean drop_remote_database_core(RawStore ms, final Database db) throws MetaException, NoSuchObjectException { boolean success = false; firePreEvent(new PreDropDatabaseEvent(db, this)); if (ms.dropDatabase(db.getCatalogName(), db.getName())) { success = ms.commitTransaction(); } return success; } @Override public void drop_database(final String dbName, final boolean deleteData, final boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException { startFunction("drop_database", ": " + dbName); String[] parsedDbName = parseDbName(dbName, conf); if (DEFAULT_CATALOG_NAME.equalsIgnoreCase(parsedDbName[CAT_NAME]) && DEFAULT_DATABASE_NAME.equalsIgnoreCase(parsedDbName[DB_NAME])) { endFunction("drop_database", false, null); throw new MetaException("Can not drop " + DEFAULT_DATABASE_NAME + " database in catalog " + DEFAULT_CATALOG_NAME); } boolean success = false; Exception ex = null; try { drop_database_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], deleteData, cascade); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(NoSuchObjectException.class, InvalidOperationException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("drop_database", success, ex); } } @Override public List<String> get_databases(final String pattern) throws MetaException { startFunction("get_databases", ": " + pattern); String[] parsedDbNamed = parseDbName(pattern, conf); List<String> ret = null; Exception ex = null; try { if (parsedDbNamed[DB_NAME] == null) { ret = getMS().getAllDatabases(parsedDbNamed[CAT_NAME]); ret = FilterUtils.filterDbNamesIfEnabled(isServerFilterEnabled, filterHook, ret); } else { ret = getMS().getDatabases(parsedDbNamed[CAT_NAME], parsedDbNamed[DB_NAME]); ret = FilterUtils.filterDbNamesIfEnabled(isServerFilterEnabled, filterHook, ret); } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_databases", ret != null, ex); } return ret; } @Override public List<String> get_all_databases() throws MetaException { // get_databases filters results already. 
// No need to filter here
return get_databases(MetaStoreUtils.prependCatalogToDbName(null, null, conf));
}

private void create_dataconnector_core(RawStore ms, final DataConnector connector)
    throws AlreadyExistsException, InvalidObjectException, MetaException {
  if (!MetaStoreUtils.validateName(connector.getName(), conf)) {
    throw new InvalidObjectException(connector.getName() + " is not a valid dataconnector name");
  }

  if (connector.getOwnerName() == null) {
    try {
      connector.setOwnerName(SecurityUtils.getUGI().getShortUserName());
    } catch (Exception e) {
      LOG.warn("Failed to get owner name for create dataconnector operation.", e);
    }
  }
  long time = System.currentTimeMillis() / 1000;
  connector.setCreateTime((int) time);
  boolean success = false;
  Map<String, String> transactionalListenersResponses = Collections.emptyMap();
  try {
    firePreEvent(new PreCreateDataConnectorEvent(connector, this));

    ms.openTransaction();
    ms.createDataConnector(connector);

    if (!transactionalListeners.isEmpty()) {
      transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
          EventType.CREATE_DATACONNECTOR, new CreateDataConnectorEvent(connector, true, this));
    }

    success = ms.commitTransaction();
  } finally {
    if (!success) {
      ms.rollbackTransaction();
    }

    if (!listeners.isEmpty()) {
      MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_DATACONNECTOR,
          new CreateDataConnectorEvent(connector, success, this),
          null, transactionalListenersResponses, ms);
    }
  }
}

@Override
public void create_dataconnector(final DataConnector connector)
    throws AlreadyExistsException, InvalidObjectException, MetaException {
  startFunction("create_dataconnector", ": " + connector.toString());
  boolean success = false;
  Exception ex = null;
  try {
    try {
      if (null != get_dataconnector_core(connector.getName())) {
        throw new AlreadyExistsException("DataConnector " + connector.getName() + " already exists");
      }
    } catch (NoSuchObjectException e) {
      // expected
    }

    if (testTimeoutEnabled) {
      try {
        Thread.sleep(testTimeoutValue);
      } catch (InterruptedException e) {
        // do nothing
      }
      Deadline.checkTimeout();
    }
    create_dataconnector_core(getMS(), connector);
    success = true;
  } catch (Exception e) {
    ex = e;
    throw handleException(e)
        .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class)
        .defaultMetaException();
  } finally {
    endFunction("create_dataconnector", success, ex);
  }
}

@Override
public DataConnector get_dataconnector_core(final String name) throws NoSuchObjectException, MetaException {
  DataConnector connector = null;
  if (name == null) {
    throw new MetaException("Data connector name cannot be null.");
  }
  try {
    connector = getMS().getDataConnector(name);
  } catch (Exception e) {
    throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class)
        .defaultRuntimeException();
  }
  return connector;
}

@Override
public DataConnector get_dataconnector_req(GetDataConnectorRequest request)
    throws NoSuchObjectException, MetaException {
  startFunction("get_dataconnector", ": " + request.getConnectorName());
  DataConnector connector = null;
  Exception ex = null;
  try {
    connector = get_dataconnector_core(request.getConnectorName());
  } catch (Exception e) {
    ex = e;
    throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class)
        .defaultRuntimeException();
  } finally {
    endFunction("get_dataconnector", connector != null, ex);
  }
  return connector;
}

@Override
public void alter_dataconnector(final String dcName, final DataConnector newDC) throws TException {
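// Mirrors alter_database above; the pre-event and listener notification hooks below are
// still commented out, apparently because connector-level alter events are not wired up yet.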
startFunction("alter_dataconnector " + dcName); boolean success = false; Exception ex = null; RawStore ms = getMS(); DataConnector oldDC = null; Map<String, String> transactionalListenersResponses = Collections.emptyMap(); try { oldDC = get_dataconnector_core(dcName); if (oldDC == null) { throw new MetaException("Could not alter dataconnector \"" + dcName + "\". Could not retrieve old definition."); } // firePreEvent(new PreAlterDatabaseEvent(oldDC, newDC, this)); ms.openTransaction(); ms.alterDataConnector(dcName, newDC); /* if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_DATACONNECTOR, new AlterDataConnectorEvent(oldDC, newDC, true, this)); } */ success = ms.commitTransaction(); } catch (MetaException|NoSuchObjectException e) { ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } /* if ((null != oldDC) && (!listeners.isEmpty())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_DATACONNECTOR, new AlterDataConnectorEvent(oldDC, newDC, success, this), null, transactionalListenersResponses, ms); } */ endFunction("alter_database", success, ex); } } @Override public List<String> get_dataconnectors() throws MetaException { startFunction("get_dataconnectors"); List<String> ret = null; Exception ex = null; try { ret = getMS().getAllDataConnectorNames(); ret = FilterUtils.filterDataConnectorsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_dataconnectors", ret != null, ex); } return ret; } @Override public void drop_dataconnector(final String dcName, boolean ifNotExists, boolean checkReferences) throws NoSuchObjectException, InvalidOperationException, MetaException { startFunction("drop_dataconnector", ": " + dcName); boolean success = false; DataConnector connector = null; Exception ex = null; RawStore ms = getMS(); try { ms.openTransaction(); connector = getMS().getDataConnector(dcName); if (connector == null) { if (!ifNotExists) { throw new NoSuchObjectException("DataConnector " + dcName + " doesn't exist"); } else { return; } } // TODO find DBs with references to this connector // if any existing references and checkReferences=true, do not drop // firePreEvent(new PreDropTableEvent(tbl, deleteData, this)); if (!ms.dropDataConnector(dcName)) { throw new MetaException("Unable to drop dataconnector " + dcName); } else { /* // TODO if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_TABLE, new DropTableEvent(tbl, true, deleteData, this, isReplicated), envContext); } */ success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } /* if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_TABLE, new DropTableEvent(tbl, success, deleteData, this, isReplicated), envContext, transactionalListenerResponses, ms); } */ endFunction("drop_dataconnector", success, ex); } } private void create_type_core(final RawStore ms, final Type type) throws AlreadyExistsException, MetaException, InvalidObjectException { if (!MetaStoreUtils.validateName(type.getName(), null)) { throw new InvalidObjectException("Invalid type name"); } boolean success = false; try { ms.openTransaction(); if (is_type_exists(ms, type.getName())) { throw new AlreadyExistsException("Type " + type.getName() + " already exists"); } ms.createType(type); 
success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } } } @Override public boolean create_type(final Type type) throws AlreadyExistsException, MetaException, InvalidObjectException { startFunction("create_type", ": " + type.toString()); boolean success = false; Exception ex = null; try { create_type_core(getMS(), type); success = true; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("create_type", success, ex); } return success; } @Override public Type get_type(final String name) throws MetaException, NoSuchObjectException { startFunction("get_type", ": " + name); Type ret = null; Exception ex = null; try { ret = getMS().getType(name); if (null == ret) { throw new NoSuchObjectException("Type \"" + name + "\" not found."); } } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_type", ret != null, ex); } return ret; } private boolean is_type_exists(RawStore ms, String typeName) throws MetaException { return (ms.getType(typeName) != null); } @Override public boolean drop_type(final String name) throws MetaException, NoSuchObjectException { startFunction("drop_type", ": " + name); boolean success = false; Exception ex = null; try { // TODO:pc validate that there are no types that refer to this success = getMS().dropType(name); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("drop_type", success, ex); } return success; } @Override public Map<String, Type> get_type_all(String name) throws MetaException { // TODO Auto-generated method stub startFunction("get_type_all", ": " + name); endFunction("get_type_all", false, null); throw new MetaException("Not yet implemented"); } @Override public Table translate_table_dryrun(final Table tbl) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { Table transformedTbl = null; if (!tbl.isSetCatName()) { tbl.setCatName(getDefaultCatalog(conf)); } if (transformer != null) { transformedTbl = transformer.transformCreateTable(tbl, null, null); } return transformedTbl != null ? 
transformedTbl : tbl; } private void create_table_core(final RawStore ms, final Table tbl, final EnvironmentContext envContext) throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { CreateTableRequest req = new CreateTableRequest(tbl); req.setEnvContext(envContext); create_table_core(ms, req); } private void create_table_core(final RawStore ms, final Table tbl, final EnvironmentContext envContext, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints, List<String> processorCapabilities, String processorIdentifier) throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { CreateTableRequest req = new CreateTableRequest(tbl); if (envContext != null) { req.setEnvContext(envContext); } if (primaryKeys != null) { req.setPrimaryKeys(primaryKeys); } if (foreignKeys != null) { req.setForeignKeys(foreignKeys); } if (uniqueConstraints != null) { req.setUniqueConstraints(uniqueConstraints); } if (notNullConstraints != null) { req.setNotNullConstraints(notNullConstraints); } if (defaultConstraints != null) { req.setDefaultConstraints(defaultConstraints); } if (checkConstraints != null) { req.setCheckConstraints(checkConstraints); } if (processorCapabilities != null) { req.setProcessorCapabilities(processorCapabilities); req.setProcessorIdentifier(processorIdentifier); } create_table_core(ms, req); } private void create_table_core(final RawStore ms, final CreateTableRequest req) throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { ColumnStatistics colStats = null; Table tbl = req.getTable(); EnvironmentContext envContext = req.getEnvContext(); SQLAllTableConstraints constraints = new SQLAllTableConstraints(); constraints.setPrimaryKeys(req.getPrimaryKeys()); constraints.setForeignKeys(req.getForeignKeys()); constraints.setUniqueConstraints(req.getUniqueConstraints()); constraints.setDefaultConstraints(req.getDefaultConstraints()); constraints.setCheckConstraints(req.getCheckConstraints()); constraints.setNotNullConstraints(req.getNotNullConstraints()); List<String> processorCapabilities = req.getProcessorCapabilities(); String processorId = req.getProcessorIdentifier(); // To preserve backward compatibility throw MetaException in case of null database if (tbl.getDbName() == null) { throw new MetaException("Null database name is not allowed"); } if (!MetaStoreUtils.validateName(tbl.getTableName(), conf)) { throw new InvalidObjectException(tbl.getTableName() + " is not a valid object name"); } if (!tbl.isSetCatName()) { tbl.setCatName(getDefaultCatalog(conf)); } Database db = get_database_core(tbl.getCatName(), tbl.getDbName()); if (db != null && db.getType().equals(DatabaseType.REMOTE)) { // HIVE-24425: Create table in REMOTE db should fail throw new MetaException("Create table in REMOTE database " + db.getName() + " is not allowed"); } if (transformer != null) { tbl = transformer.transformCreateTable(tbl, processorCapabilities, processorId); } if (tbl.getParameters() != null) { tbl.getParameters().remove(TABLE_IS_CTAS); } // If the given table has column statistics, save it here. We will update it later. 
// We don't want it to be part of the Table object being created, lest the create table // event will also have the col stats which we don't want. if (tbl.isSetColStats()) { colStats = tbl.getColStats(); tbl.unsetColStats(); } String validate = MetaStoreServerUtils.validateTblColumns(tbl.getSd().getCols()); if (validate != null) { throw new InvalidObjectException("Invalid column " + validate); } if (tbl.getPartitionKeys() != null) { validate = MetaStoreServerUtils.validateTblColumns(tbl.getPartitionKeys()); if (validate != null) { throw new InvalidObjectException("Invalid partition column " + validate); } } if (tbl.isSetId()) { LOG.debug("Id shouldn't be set but table {}.{} has the Id set to {}. Id is ignored.", tbl.getDbName(), tbl.getTableName(), tbl.getId()); tbl.unsetId(); } SkewedInfo skew = tbl.getSd().getSkewedInfo(); if (skew != null) { validate = MetaStoreServerUtils.validateSkewedColNames(skew.getSkewedColNames()); if (validate != null) { throw new InvalidObjectException("Invalid skew column " + validate); } validate = MetaStoreServerUtils.validateSkewedColNamesSubsetCol( skew.getSkewedColNames(), tbl.getSd().getCols()); if (validate != null) { throw new InvalidObjectException("Invalid skew column " + validate); } } Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Path tblPath = null; boolean success = false, madeDir = false; boolean isReplicated = false; try { firePreEvent(new PreCreateTableEvent(tbl, this)); ms.openTransaction(); db = ms.getDatabase(tbl.getCatName(), tbl.getDbName()); isReplicated = isDbReplicationTarget(db); // get_table checks whether database exists, it should be moved here if (is_table_exists(ms, tbl.getCatName(), tbl.getDbName(), tbl.getTableName())) { throw new AlreadyExistsException("Table " + getCatalogQualifiedTableName(tbl) + " already exists"); } if (!TableType.VIRTUAL_VIEW.toString().equals(tbl.getTableType())) { if (tbl.getSd().getLocation() == null || tbl.getSd().getLocation().isEmpty()) { tblPath = wh.getDefaultTablePath(db, tbl); } else { if (!isExternal(tbl) && !MetaStoreUtils.isNonNativeTable(tbl)) { LOG.warn("Location: " + tbl.getSd().getLocation() + " specified for non-external table:" + tbl.getTableName()); } tblPath = wh.getDnsPath(new Path(tbl.getSd().getLocation())); } tbl.getSd().setLocation(tblPath.toString()); } if (tblPath != null) { if (!wh.isDir(tblPath)) { if (!wh.mkdirs(tblPath)) { throw new MetaException(tblPath + " is not a directory or unable to create one"); } madeDir = true; } } if (MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) && !MetaStoreUtils.isView(tbl)) { MetaStoreServerUtils.updateTableStatsSlow(db, tbl, wh, madeDir, false, envContext); } // set create time long time = System.currentTimeMillis() / 1000; tbl.setCreateTime((int) time); if (tbl.getParameters() == null || tbl.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) { tbl.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time)); } if (CollectionUtils.isEmpty(constraints.getPrimaryKeys()) && CollectionUtils.isEmpty(constraints.getForeignKeys()) && CollectionUtils.isEmpty(constraints.getUniqueConstraints())&& CollectionUtils.isEmpty(constraints.getNotNullConstraints())&& CollectionUtils.isEmpty(constraints.getDefaultConstraints()) && CollectionUtils.isEmpty(constraints.getCheckConstraints())) { ms.createTable(tbl); } else { final String catName = tbl.getCatName(); // Check that constraints have catalog name properly set first if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys()) && 
!constraints.getPrimaryKeys().get(0).isSetCatName()) { constraints.getPrimaryKeys().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getForeignKeys()) && !constraints.getForeignKeys().get(0).isSetCatName()) { constraints.getForeignKeys().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints()) && !constraints.getUniqueConstraints().get(0).isSetCatName()) { constraints.getUniqueConstraints().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints()) && !constraints.getNotNullConstraints().get(0).isSetCatName()) { constraints.getNotNullConstraints().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints()) && !constraints.getDefaultConstraints().get(0).isSetCatName()) { constraints.getDefaultConstraints().forEach(constraint -> constraint.setCatName(catName)); } if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints()) && !constraints.getCheckConstraints().get(0).isSetCatName()) { constraints.getCheckConstraints().forEach(constraint -> constraint.setCatName(catName)); } // Set constraint name if null before sending to listener constraints = ms.createTableWithConstraints(tbl, constraints); } if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_TABLE, new CreateTableEvent(tbl, true, this, isReplicated), envContext); if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PRIMARYKEY, new AddPrimaryKeyEvent(constraints.getPrimaryKeys(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getForeignKeys())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_FOREIGNKEY, new AddForeignKeyEvent(constraints.getForeignKeys(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_UNIQUECONSTRAINT, new AddUniqueConstraintEvent(constraints.getUniqueConstraints(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_NOTNULLCONSTRAINT, new AddNotNullConstraintEvent(constraints.getNotNullConstraints(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_CHECKCONSTRAINT, new AddCheckConstraintEvent(constraints.getCheckConstraints(), true, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints())) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_DEFAULTCONSTRAINT, new AddDefaultConstraintEvent(constraints.getDefaultConstraints(), true, this), envContext); } } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); if (madeDir) { wh.deleteDir(tblPath, true, false, ReplChangeManager.shouldEnableCm(db, tbl)); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_TABLE, new CreateTableEvent(tbl, success, this, isReplicated), envContext, transactionalListenerResponses, ms); if (CollectionUtils.isNotEmpty(constraints.getPrimaryKeys())) { 
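// Unlike the transactional notifications above, the events fired in this finally block
// carry the final success flag, so listeners can tell committed creations apart from
// rolled-back ones.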
MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PRIMARYKEY, new AddPrimaryKeyEvent(constraints.getPrimaryKeys(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getForeignKeys())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_FOREIGNKEY, new AddForeignKeyEvent(constraints.getForeignKeys(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getUniqueConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_UNIQUECONSTRAINT, new AddUniqueConstraintEvent(constraints.getUniqueConstraints(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getNotNullConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_NOTNULLCONSTRAINT, new AddNotNullConstraintEvent(constraints.getNotNullConstraints(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getDefaultConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_DEFAULTCONSTRAINT, new AddDefaultConstraintEvent(constraints.getDefaultConstraints(), success, this), envContext); } if (CollectionUtils.isNotEmpty(constraints.getCheckConstraints())) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_CHECKCONSTRAINT, new AddCheckConstraintEvent(constraints.getCheckConstraints(), success, this), envContext); } } } // If the table has column statistics, update it into the metastore. We need a valid // writeId list to update column statistics for a transactional table. But during bootstrap // replication, where we use this feature, we do not have a valid writeId list which was // used to update the stats. But we know for sure that the writeId associated with the // stats was valid then (otherwise stats update would have failed on the source). So, craft // a valid transaction list with only that writeId and use it to update the stats. 
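// Illustration with hypothetical values: for writeId 7 on table d.t, the
// ValidReaderWriteIdList constructed below serializes to roughly "d.t:7:<Long.MAX_VALUE>::",
// i.e. a high-water mark of 7 with no open or aborted write ids.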
if (colStats != null) { long writeId = tbl.getWriteId(); String validWriteIds = null; if (writeId > 0) { ValidWriteIdList validWriteIdList = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(), tbl.getTableName()), new long[0], new BitSet(), writeId); validWriteIds = validWriteIdList.toString(); } updateTableColumnStatsInternal(colStats, validWriteIds, tbl.getWriteId()); } } @Override public void create_table(final Table tbl) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { create_table_with_environment_context(tbl, null); } @Override public void create_table_with_environment_context(final Table tbl, final EnvironmentContext envContext) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { startFunction("create_table", ": " + tbl.toString()); boolean success = false; Exception ex = null; try { create_table_core(getMS(), tbl, envContext); success = true; } catch (Exception e) { LOG.warn("create_table_with_environment_context got ", e); ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .throwIfInstance(AlreadyExistsException.class, InvalidInputException.class) .convertIfInstance(NoSuchObjectException.class, InvalidObjectException.class) .defaultMetaException(); } finally { endFunction("create_table", success, ex, tbl.getTableName()); } } @Override public void create_table_req(final CreateTableRequest req) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { Table tbl = req.getTable(); startFunction("create_table_req", ": " + tbl.toString()); boolean success = false; Exception ex = null; try { create_table_core(getMS(), req); success = true; } catch (Exception e) { LOG.warn("create_table_req got ", e); ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .throwIfInstance(AlreadyExistsException.class, InvalidInputException.class) .convertIfInstance(NoSuchObjectException.class, InvalidObjectException.class) .defaultMetaException(); } finally { endFunction("create_table_req", success, ex, tbl.getTableName()); } } @Override public void create_table_with_constraints(final Table tbl, final List<SQLPrimaryKey> primaryKeys, final List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints) throws AlreadyExistsException, MetaException, InvalidObjectException, InvalidInputException { startFunction("create_table", ": " + tbl.toString()); boolean success = false; Exception ex = null; try { CreateTableRequest req = new CreateTableRequest(tbl); req.setPrimaryKeys(primaryKeys); req.setForeignKeys(foreignKeys); req.setUniqueConstraints(uniqueConstraints); req.setNotNullConstraints(notNullConstraints); req.setDefaultConstraints(defaultConstraints); req.setCheckConstraints(checkConstraints); create_table_req(req); success = true; } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .throwIfInstance(AlreadyExistsException.class, InvalidInputException.class) .defaultMetaException(); } finally { endFunction("create_table_with_constraints", success, ex, tbl.getTableName()); } } @Override public void drop_constraint(DropConstraintRequest req) throws MetaException, InvalidObjectException { String catName = req.isSetCatName() ? 
req.getCatName() : getDefaultCatalog(conf); String dbName = req.getDbname(); String tableName = req.getTablename(); String constraintName = req.getConstraintname(); startFunction("drop_constraint", ": " + constraintName); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { ms.openTransaction(); ms.dropConstraint(catName, dbName, tableName, constraintName); if (transactionalListeners.size() > 0) { DropConstraintEvent dropConstraintEvent = new DropConstraintEvent(catName, dbName, tableName, constraintName, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onDropConstraint(dropConstraintEvent); } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class) .convertIfInstance(NoSuchObjectException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else { for (MetaStoreEventListener listener : listeners) { DropConstraintEvent dropConstraintEvent = new DropConstraintEvent(catName, dbName, tableName, constraintName, true, this); listener.onDropConstraint(dropConstraintEvent); } } endFunction("drop_constraint", success, ex, constraintName); } } @Override public void add_primary_key(AddPrimaryKeyRequest req) throws MetaException, InvalidObjectException { List<SQLPrimaryKey> primaryKeyCols = req.getPrimaryKeyCols(); String constraintName = (CollectionUtils.isNotEmpty(primaryKeyCols)) ? primaryKeyCols.get(0).getPk_name() : "null"; startFunction("add_primary_key", ": " + constraintName); boolean success = false; Exception ex = null; if (CollectionUtils.isNotEmpty(primaryKeyCols) && !primaryKeyCols.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); primaryKeyCols.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); List<SQLPrimaryKey> primaryKeys = ms.addPrimaryKeys(primaryKeyCols); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(primaryKeys)) { AddPrimaryKeyEvent addPrimaryKeyEvent = new AddPrimaryKeyEvent(primaryKeys, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddPrimaryKey(addPrimaryKeyEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (primaryKeyCols != null && primaryKeyCols.size() > 0) { for (MetaStoreEventListener listener : listeners) { AddPrimaryKeyEvent addPrimaryKeyEvent = new AddPrimaryKeyEvent(primaryKeyCols, true, this); listener.onAddPrimaryKey(addPrimaryKeyEvent); } } endFunction("add_primary_key", success, ex, constraintName); } } @Override public void add_foreign_key(AddForeignKeyRequest req) throws MetaException, InvalidObjectException { List<SQLForeignKey> foreignKeys = req.getForeignKeyCols(); String constraintName = CollectionUtils.isNotEmpty(foreignKeys) ? 
foreignKeys.get(0).getFk_name() : "null"; startFunction("add_foreign_key", ": " + constraintName); boolean success = false; Exception ex = null; if (CollectionUtils.isNotEmpty(foreignKeys) && !foreignKeys.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); foreignKeys.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); foreignKeys = ms.addForeignKeys(foreignKeys); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(foreignKeys)) { AddForeignKeyEvent addForeignKeyEvent = new AddForeignKeyEvent(foreignKeys, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddForeignKey(addForeignKeyEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(foreignKeys)) { for (MetaStoreEventListener listener : listeners) { AddForeignKeyEvent addForeignKeyEvent = new AddForeignKeyEvent(foreignKeys, true, this); listener.onAddForeignKey(addForeignKeyEvent); } } endFunction("add_foreign_key", success, ex, constraintName); } } @Override public void add_unique_constraint(AddUniqueConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLUniqueConstraint> uniqueConstraints = req.getUniqueConstraintCols(); String constraintName = (uniqueConstraints != null && uniqueConstraints.size() > 0) ? uniqueConstraints.get(0).getUk_name() : "null"; startFunction("add_unique_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!uniqueConstraints.isEmpty() && !uniqueConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); uniqueConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); uniqueConstraints = ms.addUniqueConstraints(uniqueConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(uniqueConstraints)) { AddUniqueConstraintEvent addUniqueConstraintEvent = new AddUniqueConstraintEvent(uniqueConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddUniqueConstraint(addUniqueConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(uniqueConstraints)) { for (MetaStoreEventListener listener : listeners) { AddUniqueConstraintEvent addUniqueConstraintEvent = new AddUniqueConstraintEvent(uniqueConstraints, true, this); listener.onAddUniqueConstraint(addUniqueConstraintEvent); } } endFunction("add_unique_constraint", success, ex, constraintName); } } @Override public void add_not_null_constraint(AddNotNullConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLNotNullConstraint> notNullConstraints = req.getNotNullConstraintCols(); String constraintName = (notNullConstraints != null && notNullConstraints.size() > 0) ? 
notNullConstraints.get(0).getNn_name() : "null"; startFunction("add_not_null_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!notNullConstraints.isEmpty() && !notNullConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); notNullConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); notNullConstraints = ms.addNotNullConstraints(notNullConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(notNullConstraints)) { AddNotNullConstraintEvent addNotNullConstraintEvent = new AddNotNullConstraintEvent(notNullConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddNotNullConstraint(addNotNullConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class).defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(notNullConstraints)) { for (MetaStoreEventListener listener : listeners) { AddNotNullConstraintEvent addNotNullConstraintEvent = new AddNotNullConstraintEvent(notNullConstraints, true, this); listener.onAddNotNullConstraint(addNotNullConstraintEvent); } } endFunction("add_not_null_constraint", success, ex, constraintName); } } @Override public void add_default_constraint(AddDefaultConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLDefaultConstraint> defaultConstraints = req.getDefaultConstraintCols(); String constraintName = CollectionUtils.isNotEmpty(defaultConstraints) ? defaultConstraints.get(0).getDc_name() : "null"; startFunction("add_default_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!defaultConstraints.isEmpty() && !defaultConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); defaultConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); defaultConstraints = ms.addDefaultConstraints(defaultConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(defaultConstraints)) { AddDefaultConstraintEvent addDefaultConstraintEvent = new AddDefaultConstraintEvent(defaultConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddDefaultConstraint(addDefaultConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class).defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(defaultConstraints)) { for (MetaStoreEventListener listener : listeners) { AddDefaultConstraintEvent addDefaultConstraintEvent = new AddDefaultConstraintEvent(defaultConstraints, true, this); listener.onAddDefaultConstraint(addDefaultConstraintEvent); } } endFunction("add_default_constraint", success, ex, constraintName); } } @Override public void add_check_constraint(AddCheckConstraintRequest req) throws MetaException, InvalidObjectException { List<SQLCheckConstraint> checkConstraints= req.getCheckConstraintCols(); String constraintName = CollectionUtils.isNotEmpty(checkConstraints) ? 
checkConstraints.get(0).getDc_name() : "null"; startFunction("add_check_constraint", ": " + constraintName); boolean success = false; Exception ex = null; if (!checkConstraints.isEmpty() && !checkConstraints.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); checkConstraints.forEach(pk -> pk.setCatName(defaultCat)); } RawStore ms = getMS(); try { ms.openTransaction(); checkConstraints = ms.addCheckConstraints(checkConstraints); if (transactionalListeners.size() > 0) { if (CollectionUtils.isNotEmpty(checkConstraints)) { AddCheckConstraintEvent addcheckConstraintEvent = new AddCheckConstraintEvent(checkConstraints, true, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onAddCheckConstraint(addcheckConstraintEvent); } } } success = ms.commitTransaction(); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidObjectException.class).defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } else if (CollectionUtils.isNotEmpty(checkConstraints)) { for (MetaStoreEventListener listener : listeners) { AddCheckConstraintEvent addCheckConstraintEvent = new AddCheckConstraintEvent(checkConstraints, true, this); listener.onAddCheckConstraint(addCheckConstraintEvent); } } endFunction("add_check_constraint", success, ex, constraintName); } } private boolean is_table_exists(RawStore ms, String catName, String dbname, String name) throws MetaException { return (ms.getTable(catName, dbname, name, null) != null); } private boolean drop_table_core(final RawStore ms, final String catName, final String dbname, final String name, final boolean deleteData, final EnvironmentContext envContext, final String indexName, boolean dropPartitions) throws NoSuchObjectException, MetaException, IOException, InvalidObjectException, InvalidInputException { boolean success = false; boolean tableDataShouldBeDeleted = false; Path tblPath = null; List<Path> partPaths = null; Table tbl = null; boolean ifPurge = false; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; boolean isReplicated = false; try { ms.openTransaction(); // HIVE-25282: Drop/Alter table in REMOTE db should fail db = ms.getDatabase(catName, dbname); if (db.getType() == DatabaseType.REMOTE) { throw new MetaException("Drop table in REMOTE database " + db.getName() + " is not allowed"); } isReplicated = isDbReplicationTarget(db); // drop any partitions GetTableRequest req = new GetTableRequest(dbname,name); req.setCatName(catName); tbl = get_table_core(req); if (tbl == null) { throw new NoSuchObjectException(name + " doesn't exist"); } // Check if table is part of a materialized view. // If it is, it cannot be dropped. List<String> isPartOfMV = ms.isPartOfMaterializedView(catName, dbname, name); if (!isPartOfMV.isEmpty()) { throw new MetaException(String.format("Cannot drop table as it is used in the following materialized" + " views %s%n", isPartOfMV)); } if (tbl.getSd() == null) { throw new MetaException("Table metadata is corrupted"); } ifPurge = isMustPurge(envContext, tbl); firePreEvent(new PreDropTableEvent(tbl, deleteData, this)); tableDataShouldBeDeleted = checkTableDataShouldBeDeleted(tbl, deleteData); if (tbl.getSd().getLocation() != null) { tblPath = new Path(tbl.getSd().getLocation()); if (!wh.isWritable(tblPath.getParent())) { String target = indexName == null ? 
"Table" : "Index table"; throw new MetaException(target + " metadata not deleted since " + tblPath.getParent() + " is not writable by " + SecurityUtils.getUser()); } } // Drop the partitions and get a list of locations which need to be deleted // In case of drop database cascade we need not to drop the partitions, they are already dropped. if (dropPartitions) { partPaths = dropPartitionsAndGetLocations(ms, catName, dbname, name, tblPath, tableDataShouldBeDeleted); } // Drop any constraints on the table ms.dropConstraint(catName, dbname, name, null, true); if (!ms.dropTable(catName, dbname, name)) { String tableName = TableName.getQualified(catName, dbname, name); throw new MetaException(indexName == null ? "Unable to drop table " + tableName: "Unable to drop index table " + tableName + " for index " + indexName); } else { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_TABLE, new DropTableEvent(tbl, true, deleteData, this, isReplicated), envContext); } success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } else if (tableDataShouldBeDeleted) { // Data needs deletion. Check if trash may be skipped. // Delete the data in the partitions which have other locations deletePartitionData(partPaths, ifPurge, ReplChangeManager.shouldEnableCm(db, tbl)); // Delete the data in the table deleteTableData(tblPath, ifPurge, ReplChangeManager.shouldEnableCm(db, tbl)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_TABLE, new DropTableEvent(tbl, success, deleteData, this, isReplicated), envContext, transactionalListenerResponses, ms); } } return success; } private boolean checkTableDataShouldBeDeleted(Table tbl, boolean deleteData) { if (deleteData && isExternal(tbl)) { // External table data can be deleted if EXTERNAL_TABLE_PURGE is true return isExternalTablePurge(tbl); } return deleteData; } /** * Deletes the data in a table's location, if it fails logs an error * * @param tablePath * @param ifPurge completely purge the table (skipping trash) while removing * data from warehouse * @param shouldEnableCm If cm should be enabled */ private void deleteTableData(Path tablePath, boolean ifPurge, boolean shouldEnableCm) { if (tablePath != null) { deleteDataExcludeCmroot(tablePath, ifPurge, shouldEnableCm); } } /** * Deletes the data in a table's location, if it fails logs an error. * * @param tablePath * @param ifPurge completely purge the table (skipping trash) while removing * data from warehouse * @param db Database */ private void deleteTableData(Path tablePath, boolean ifPurge, Database db) { if (tablePath != null) { try { wh.deleteDir(tablePath, true, ifPurge, db); } catch (Exception e) { LOG.error("Failed to delete table directory: " + tablePath + " " + e.getMessage()); } } } /** * Give a list of partitions' locations, tries to delete each one * and for each that fails logs an error. * * @param partPaths * @param ifPurge completely purge the partition (skipping trash) while * removing data from warehouse * @param shouldEnableCm If cm should be enabled */ private void deletePartitionData(List<Path> partPaths, boolean ifPurge, boolean shouldEnableCm) { if (partPaths != null && !partPaths.isEmpty()) { for (Path partPath : partPaths) { deleteDataExcludeCmroot(partPath, ifPurge, shouldEnableCm); } } } /** * Give a list of partitions' locations, tries to delete each one * and for each that fails logs an error. 
* * @param partPaths * @param ifPurge completely purge the partition (skipping trash) while * removing data from warehouse * @param db Database */ private void deletePartitionData(List<Path> partPaths, boolean ifPurge, Database db) { if (partPaths != null && !partPaths.isEmpty()) { for (Path partPath : partPaths) { try { wh.deleteDir(partPath, true, ifPurge, db); } catch (Exception e) { LOG.error("Failed to delete partition directory: " + partPath + " " + e.getMessage()); } } } } /** * Delete data from path excluding cmdir * and for each that fails logs an error. * * @param path * @param ifPurge completely purge the partition (skipping trash) while * removing data from warehouse * @param shouldEnableCm If cm should be enabled */ private void deleteDataExcludeCmroot(Path path, boolean ifPurge, boolean shouldEnableCm) { try { if (shouldEnableCm) { //Don't delete cmdir if its inside the partition path FileStatus[] statuses = path.getFileSystem(conf).listStatus(path, ReplChangeManager.CMROOT_PATH_FILTER); for (final FileStatus status : statuses) { wh.deleteDir(status.getPath(), true, ifPurge, shouldEnableCm); } //Check if table directory is empty, delete it FileStatus[] statusWithoutFilter = path.getFileSystem(conf).listStatus(path); if (statusWithoutFilter.length == 0) { wh.deleteDir(path, true, ifPurge, shouldEnableCm); } } else { //If no cm delete the complete table directory wh.deleteDir(path, true, ifPurge, shouldEnableCm); } } catch (Exception e) { LOG.error("Failed to delete directory: " + path + " " + e.getMessage()); } } /** * Deletes the partitions specified by catName, dbName, tableName. If checkLocation is true, for * locations of partitions which may not be subdirectories of tablePath checks to make sure the * locations are writable. * * Drops the metadata for each partition. * * Provides a list of locations of partitions which may not be subdirectories of tablePath. * * @param ms RawStore to use for metadata retrieval and delete * @param catName The catName * @param dbName The dbName * @param tableName The tableName * @param tablePath The tablePath of which subdirectories does not have to be checked * @param checkLocation Should we check the locations at all * @return The list of the Path objects to delete (only in case checkLocation is true) * @throws MetaException * @throws IOException * @throws NoSuchObjectException */ private List<Path> dropPartitionsAndGetLocations(RawStore ms, String catName, String dbName, String tableName, Path tablePath, boolean checkLocation) throws MetaException, IOException, NoSuchObjectException { int batchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_OBJECTS_MAX); String tableDnsPath = null; if (tablePath != null) { tableDnsPath = wh.getDnsPath(tablePath).toString(); } List<Path> partPaths = new ArrayList<>(); while (true) { Map<String, String> partitionLocations = ms.getPartitionLocations(catName, dbName, tableName, tableDnsPath, batchSize); if (partitionLocations == null || partitionLocations.isEmpty()) { // No more partitions left to drop. Return with the collected path list to delete. return partPaths; } if (checkLocation) { for (String partName : partitionLocations.keySet()) { String pathString = partitionLocations.get(partName); if (pathString != null) { Path partPath = wh.getDnsPath(new Path(pathString)); // Double check here. 
// Maybe Warehouse.getDnsPath revealed a relationship between the path objects
if (tableDnsPath == null ||
    !FileUtils.isSubdirectory(tableDnsPath, partPath.toString())) {
  if (!wh.isWritable(partPath.getParent())) {
    throw new MetaException("Table metadata not deleted since the partition "
        + partName + " has parent location " + partPath.getParent()
        + " which is not writable by " + SecurityUtils.getUser());
  }
  partPaths.add(partPath);
}
}
}
}

for (MetaStoreEventListener listener : listeners) {
  // No drop part listener events fired for public listeners historically, for drop table case.
  // Limiting to internal listeners for now, to avoid unexpected calls for public listeners.
  if (listener instanceof HMSMetricsListener) {
    for (@SuppressWarnings("unused") String partName : partitionLocations.keySet()) {
      listener.onDropPartition(null);
    }
  }
}

ms.dropPartitions(catName, dbName, tableName, new ArrayList<>(partitionLocations.keySet()));
}
}

@Override
public void drop_table(final String dbname, final String name, final boolean deleteData)
    throws NoSuchObjectException, MetaException {
  drop_table_with_environment_context(dbname, name, deleteData, null);
}

@Override
public void drop_table_with_environment_context(final String dbname, final String name,
    final boolean deleteData, final EnvironmentContext envContext)
    throws NoSuchObjectException, MetaException {
  drop_table_with_environment_context(dbname, name, deleteData, envContext, true);
}

private void drop_table_with_environment_context(final String dbname, final String name,
    final boolean deleteData, final EnvironmentContext envContext,
    boolean dropPartitions) throws MetaException {
  String[] parsedDbName = parseDbName(dbname, conf);
  startTableFunction("drop_table", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name);

  boolean success = false;
  Exception ex = null;
  try {
    success = drop_table_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name,
        deleteData, envContext, null, dropPartitions);
  } catch (Exception e) {
    ex = e;
    throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class)
        .convertIfInstance(IOException.class, MetaException.class).defaultMetaException();
  } finally {
    endFunction("drop_table", success, ex, name);
  }
}

private void updateStatsForTruncate(Map<String, String> props, EnvironmentContext environmentContext) {
  if (null == props) {
    return;
  }
  for (String stat : StatsSetupConst.SUPPORTED_STATS) {
    String statVal = props.get(stat);
    if (statVal != null) {
      // In the case of truncate table, we set the stats to be 0.
props.put(stat, "0"); } } //first set basic stats to true StatsSetupConst.setBasicStatsState(props, StatsSetupConst.TRUE); environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK); environmentContext.putToProperties(StatsSetupConst.DO_NOT_POPULATE_QUICK_STATS, StatsSetupConst.TRUE); //then invalidate column stats StatsSetupConst.clearColumnStatsState(props); return; } private void alterPartitionForTruncate(RawStore ms, String catName, String dbName, String tableName, Table table, Partition partition, String validWriteIds, long writeId) throws Exception { EnvironmentContext environmentContext = new EnvironmentContext(); updateStatsForTruncate(partition.getParameters(), environmentContext); if (!transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(partition, partition, table, true, true, writeId, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(partition, partition, table, true, true, writeId, this)); } if (writeId > 0) { partition.setWriteId(writeId); } alterHandler.alterPartition(ms, wh, catName, dbName, tableName, null, partition, environmentContext, this, validWriteIds); } private void alterTableStatsForTruncate(RawStore ms, String catName, String dbName, String tableName, Table table, List<String> partNames, String validWriteIds, long writeId) throws Exception { if (partNames == null) { if (0 != table.getPartitionKeysSize()) { for (Partition partition : ms.getPartitions(catName, dbName, tableName, -1)) { alterPartitionForTruncate(ms, catName, dbName, tableName, table, partition, validWriteIds, writeId); } } else { EnvironmentContext environmentContext = new EnvironmentContext(); updateStatsForTruncate(table.getParameters(), environmentContext); boolean isReplicated = isDbReplicationTarget(ms.getDatabase(catName, dbName)); if (!transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_TABLE, new AlterTableEvent(table, table, true, true, writeId, this, isReplicated)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_TABLE, new AlterTableEvent(table, table, true, true, writeId, this, isReplicated)); } // TODO: this should actually pass thru and set writeId for txn stats. 
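      // Note on the writeId contract assumed here: a positive writeId means the truncate ran
      // with an allocated write id (transactional table), so it is stamped on the table below;
      // writeId <= 0 (e.g. -1 from the deprecated truncate_table path) leaves it untouched.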
if (writeId > 0) { table.setWriteId(writeId); } alterHandler.alterTable(ms, wh, catName, dbName, tableName, table, environmentContext, this, validWriteIds); } } else { for (Partition partition : ms.getPartitionsByNames(catName, dbName, tableName, partNames)) { alterPartitionForTruncate(ms, catName, dbName, tableName, table, partition, validWriteIds, writeId); } } return; } private List<Path> getLocationsForTruncate(final RawStore ms, final String catName, final String dbName, final String tableName, final Table table, final List<String> partNames) throws Exception { List<Path> locations = new ArrayList<>(); if (partNames == null) { if (0 != table.getPartitionKeysSize()) { for (Partition partition : ms.getPartitions(catName, dbName, tableName, -1)) { locations.add(new Path(partition.getSd().getLocation())); } } else { locations.add(new Path(table.getSd().getLocation())); } } else { for (Partition partition : ms.getPartitionsByNames(catName, dbName, tableName, partNames)) { locations.add(new Path(partition.getSd().getLocation())); } } return locations; } @Override public CmRecycleResponse cm_recycle(final CmRecycleRequest request) throws MetaException { wh.recycleDirToCmPath(new Path(request.getDataPath()), request.isPurge()); return new CmRecycleResponse(); } @Override public void truncate_table(final String dbName, final String tableName, List<String> partNames) throws NoSuchObjectException, MetaException { // Deprecated path, won't work for txn tables. truncateTableInternal(dbName, tableName, partNames, null, -1, null); } @Override public TruncateTableResponse truncate_table_req(TruncateTableRequest req) throws MetaException, TException { truncateTableInternal(req.getDbName(), req.getTableName(), req.getPartNames(), req.getValidWriteIdList(), req.getWriteId(), req.getEnvironmentContext()); return new TruncateTableResponse(); } private void truncateTableInternal(String dbName, String tableName, List<String> partNames, String validWriteIds, long writeId, EnvironmentContext context) throws MetaException, NoSuchObjectException { boolean isSkipTrash = false, needCmRecycle = false; try { String[] parsedDbName = parseDbName(dbName, conf); Table tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); boolean skipDataDeletion = Optional.ofNullable(context) .map(EnvironmentContext::getProperties) .map(prop -> prop.get(TRUNCATE_SKIP_DATA_DELETION)) .map(Boolean::parseBoolean) .orElse(false); if (!skipDataDeletion) { boolean truncateFiles = !TxnUtils.isTransactionalTable(tbl) || !MetastoreConf.getBoolVar(getConf(), MetastoreConf.ConfVars.TRUNCATE_ACID_USE_BASE); if (truncateFiles) { isSkipTrash = MetaStoreUtils.isSkipTrash(tbl.getParameters()); Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); needCmRecycle = ReplChangeManager.shouldEnableCm(db, tbl); } // This is not transactional for (Path location : getLocationsForTruncate(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, tbl, partNames)) { FileSystem fs = location.getFileSystem(getConf()); if (truncateFiles) { truncateDataFiles(location, fs, isSkipTrash, needCmRecycle); } else { // For Acid tables we don't need to delete the old files, only write an empty baseDir. 
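            // Sketch of the resulting layout, assuming a hypothetical write id of 7 (the real
            // directory name comes from AcidConstants.baseDir, the marker file from
            // AcidMetaDataFile.writeToFile):
            //   <table-or-partition-location>/base_0000007/  <- contains a TRUNCATED marker file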
            // Compaction and cleaner will take care of the rest
            addTruncateBaseFile(location, writeId, fs);
          }
        }
      }
      // Alter the table/partition stats and also notify truncate table event
      alterTableStatsForTruncate(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
          tableName, tbl, partNames, validWriteIds, writeId);
    } catch (Exception e) {
      throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class)
          .convertIfInstance(IOException.class, MetaException.class)
          .defaultMetaException();
    }
  }

  /**
   * Adds an empty baseDir with a truncate metadata file.
   * @param location partition or table directory
   * @param writeId allocated writeId
   * @param fs FileSystem
   * @throws Exception
   */
  private void addTruncateBaseFile(Path location, long writeId, FileSystem fs) throws Exception {
    Path basePath = new Path(location, AcidConstants.baseDir(writeId));
    fs.mkdirs(basePath);
    // We cannot leave the folder empty, otherwise it would be skipped by some of the file
    // listings in AcidUtils. No need for a data file, a simple metadata file is enough.
    AcidMetaDataFile.writeToFile(fs, basePath, AcidMetaDataFile.DataFormat.TRUNCATED);
  }

  private void truncateDataFiles(Path location, FileSystem fs, boolean isSkipTrash,
      boolean needCmRecycle) throws IOException, MetaException, NoSuchObjectException {
    if (!HdfsUtils.isPathEncrypted(getConf(), fs.getUri(), location) &&
        !FileUtils.pathHasSnapshotSubDir(location, fs)) {
      HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(getConf(), fs, location);
      FileStatus targetStatus = fs.getFileStatus(location);
      String targetGroup = targetStatus == null ? null : targetStatus.getGroup();
      wh.deleteDir(location, true, isSkipTrash, needCmRecycle);
      fs.mkdirs(location);
      HdfsUtils.setFullFileStatus(getConf(), status, targetGroup, fs, location, false);
    } else {
      FileStatus[] statuses = fs.listStatus(location, FileUtils.HIDDEN_FILES_PATH_FILTER);
      if (statuses == null || statuses.length == 0) {
        return;
      }
      for (final FileStatus status : statuses) {
        wh.deleteDir(status.getPath(), true, isSkipTrash, needCmRecycle);
      }
    }
  }

  /**
   * Is this an external table?
   *
   * @param table the table to check
   * @return True if the table is external, otherwise false.
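   * <p>
   * Note: this delegates to {@link MetaStoreUtils#isExternalTable(Table)}, which decides based
   * on the table's parameters ({@code EXTERNAL}) rather than on its {@code TableType}.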
*/ private boolean isExternal(Table table) { return MetaStoreUtils.isExternalTable(table); } private boolean isExternalTablePurge(Table table) { return MetaStoreUtils.isExternalTablePurge(table); } @Override @Deprecated public Table get_table(final String dbname, final String name) throws MetaException, NoSuchObjectException { String[] parsedDbName = parseDbName(dbname, conf); GetTableRequest getTableRequest = new GetTableRequest(parsedDbName[DB_NAME],name); getTableRequest.setCatName(parsedDbName[CAT_NAME]); return getTableInternal(getTableRequest); } @Override public List<ExtendedTableInfo> get_tables_ext(final GetTablesExtRequest req) throws MetaException { List<String> tables = new ArrayList<String>(); List<ExtendedTableInfo> ret = new ArrayList<ExtendedTableInfo>(); String catalog = req.getCatalog(); String database = req.getDatabase(); String pattern = req.getTableNamePattern(); List<String> processorCapabilities = req.getProcessorCapabilities(); int limit = req.getLimit(); String processorId = req.getProcessorIdentifier(); List<Table> tObjects = new ArrayList<>(); startTableFunction("get_tables_ext", catalog, database, pattern); Exception ex = null; try { tables = getMS().getTables(catalog, database, pattern, null, limit); LOG.debug("get_tables_ext:getTables() returned " + tables.size()); tables = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, catalog, database, tables); if (tables.size() > 0) { tObjects = getMS().getTableObjectsByName(catalog, database, tables); LOG.debug("get_tables_ext:getTableObjectsByName() returned " + tObjects.size()); if (processorCapabilities == null || processorCapabilities.size() == 0 || processorCapabilities.contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + processorId); } else { if (transformer != null) { Map<Table, List<String>> retMap = transformer.transform(tObjects, processorCapabilities, processorId); for (Map.Entry<Table, List<String>> entry : retMap.entrySet()) { LOG.debug("Table " + entry.getKey().getTableName() + " requires " + Arrays.toString((entry.getValue()).toArray())); ret.add(convertTableToExtendedTable(entry.getKey(), entry.getValue(), req.getRequestedFields())); } } else { for (Table table : tObjects) { ret.add(convertTableToExtendedTable(table, processorCapabilities, req.getRequestedFields())); } } } } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_tables_ext", ret != null, ex); } return ret; } private ExtendedTableInfo convertTableToExtendedTable (Table table, List<String> processorCapabilities, int mask) { ExtendedTableInfo extTable = new ExtendedTableInfo(table.getTableName()); if ((mask & GetTablesExtRequestFields.ACCESS_TYPE.getValue()) == GetTablesExtRequestFields.ACCESS_TYPE.getValue()) { extTable.setAccessType(table.getAccessType()); } if ((mask & GetTablesExtRequestFields.PROCESSOR_CAPABILITIES.getValue()) == GetTablesExtRequestFields.PROCESSOR_CAPABILITIES.getValue()) { extTable.setRequiredReadCapabilities(table.getRequiredReadCapabilities()); extTable.setRequiredWriteCapabilities(table.getRequiredWriteCapabilities()); } return extTable; } @Override public GetTableResult get_table_req(GetTableRequest req) throws MetaException, NoSuchObjectException { req.setCatName(req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf)); return new GetTableResult(getTableInternal(req)); } /** * This function retrieves table from metastore. 
If getColumnStats flag is true, * then engine should be specified so the table is retrieve with the column stats * for that engine. */ private Table getTableInternal(GetTableRequest getTableRequest) throws MetaException, NoSuchObjectException { Preconditions.checkArgument(!getTableRequest.isGetColumnStats() || getTableRequest.getEngine() != null, "To retrieve column statistics with a table, engine parameter cannot be null"); if (isInTest) { assertClientHasCapability(getTableRequest.getCapabilities(), ClientCapability.TEST_CAPABILITY, "Hive tests", "get_table_req"); } Table t = null; startTableFunction("get_table", getTableRequest.getCatName(), getTableRequest.getDbName(), getTableRequest.getTblName()); Exception ex = null; try { t = get_table_core(getTableRequest); if (MetaStoreUtils.isInsertOnlyTableParam(t.getParameters())) { assertClientHasCapability(getTableRequest.getCapabilities(), ClientCapability.INSERT_ONLY_TABLES, "insert-only tables", "get_table_req"); } if (CollectionUtils.isEmpty(getTableRequest.getProcessorCapabilities()) || getTableRequest .getProcessorCapabilities().contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + getTableRequest.getProcessorIdentifier()); } else { if (transformer != null) { List<Table> tList = new ArrayList<>(); tList.add(t); Map<Table, List<String>> ret = transformer .transform(tList, getTableRequest.getProcessorCapabilities(), getTableRequest.getProcessorIdentifier()); if (ret.size() > 1) { LOG.warn("Unexpected resultset size:" + ret.size()); throw new MetaException("Unexpected result from metadata transformer:return list size is " + ret.size()); } t = ret.keySet().iterator().next(); } } firePreEvent(new PreReadTableEvent(t, this)); } catch (MetaException | NoSuchObjectException e) { ex = e; throw e; } finally { endFunction("get_table", t != null, ex, getTableRequest.getTblName()); } return t; } @Override public List<TableMeta> get_table_meta(String dbnames, String tblNames, List<String> tblTypes) throws MetaException, NoSuchObjectException { List<TableMeta> t = null; String[] parsedDbName = parseDbName(dbnames, conf); startTableFunction("get_table_metas", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblNames); Exception ex = null; try { t = getMS().getTableMeta(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblNames, tblTypes); t = FilterUtils.filterTableMetasIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], t); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_table_metas", t != null, ex); } return t; } @Override @Deprecated public Table get_table_core( final String catName, final String dbname, final String name) throws MetaException, NoSuchObjectException { GetTableRequest getTableRequest = new GetTableRequest(dbname,name); getTableRequest.setCatName(catName); return get_table_core(getTableRequest); } @Override @Deprecated public Table get_table_core( final String catName, final String dbname, final String name, final String writeIdList) throws MetaException, NoSuchObjectException { GetTableRequest getTableRequest = new GetTableRequest(dbname,name); getTableRequest.setCatName(catName); getTableRequest.setValidWriteIdList(writeIdList); return get_table_core(getTableRequest); } /** * This function retrieves table from metastore. If getColumnStats flag is true, * then engine should be specified so the table is retrieve with the column stats * for that engine. 
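   * <p>A minimal illustrative call, with hypothetical database/table names (the
   * {@code GetTableRequest} setters below are the standard Thrift-generated ones):
   * <pre>{@code
   * GetTableRequest req = new GetTableRequest("sales_db", "web_logs");
   * req.setCatName("hive");
   * req.setGetColumnStats(true);
   * req.setEngine("hive"); // required whenever getColumnStats is true
   * Table t = get_table_core(req);
   * }</pre>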
*/ @Override public Table get_table_core(GetTableRequest getTableRequest) throws MetaException, NoSuchObjectException { Preconditions.checkArgument(!getTableRequest.isGetColumnStats() || getTableRequest.getEngine() != null, "To retrieve column statistics with a table, engine parameter cannot be null"); String catName = getTableRequest.getCatName(); String dbName = getTableRequest.getDbName(); String tblName = getTableRequest.getTblName(); Database db = null; Table t = null; try { db = get_database_core(catName, dbName); } catch (Exception e) { /* appears exception is not thrown currently if db doesnt exist */ } if (db != null) { if (db.getType().equals(DatabaseType.REMOTE)) { t = DataConnectorProviderFactory.getDataConnectorProvider(db).getTable(tblName); if (t == null) { throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found"); } t.setDbName(dbName); return t; } } try { t = getMS().getTable(catName, dbName, tblName, getTableRequest.getValidWriteIdList(), getTableRequest.getId()); if (t == null) { throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found"); } // If column statistics was requested and is valid fetch it. if (getTableRequest.isGetColumnStats()) { ColumnStatistics colStats = getMS().getTableColumnStatistics(catName, dbName, tblName, StatsSetupConst.getColumnsHavingStats(t.getParameters()), getTableRequest.getEngine(), getTableRequest.getValidWriteIdList()); if (colStats != null) { t.setColStats(colStats); } } } catch (Exception e) { throwMetaException(e); } return t; } /** * Gets multiple tables from the hive metastore. * * @param dbName * The name of the database in which the tables reside * @param tableNames * The names of the tables to get. * * @return A list of tables whose names are in the the list "names" and * are retrievable from the database specified by "dbnames." * There is no guarantee of the order of the returned tables. * If there are duplicate names, only one instance of the table will be returned. * @throws MetaException * @throws InvalidOperationException * @throws UnknownDBException */ @Override @Deprecated public List<Table> get_table_objects_by_name(final String dbName, final List<String> tableNames) throws MetaException, InvalidOperationException, UnknownDBException { String[] parsedDbName = parseDbName(dbName, conf); return getTableObjectsInternal(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableNames, null, null, null); } @Override public GetTablesResult get_table_objects_by_name_req(GetTablesRequest req) throws TException { String catName = req.isSetCatName() ? 
req.getCatName() : getDefaultCatalog(conf); if (isDatabaseRemote(req.getDbName())) { return new GetTablesResult(getRemoteTableObjectsInternal(req.getDbName(), req.getTblNames(), req.getTablesPattern())); } return new GetTablesResult(getTableObjectsInternal(catName, req.getDbName(), req.getTblNames(), req.getCapabilities(), req.getProjectionSpec(), req.getTablesPattern())); } private List<Table> filterTablesByName(List<Table> tables, List<String> tableNames) { List<Table> filteredTables = new ArrayList<>(); for (Table table : tables) { if (tableNames.contains(table.getTableName())) { filteredTables.add(table); } } return filteredTables; } private List<Table> getRemoteTableObjectsInternal(String dbname, List<String> tableNames, String pattern) throws MetaException { String[] parsedDbName = parseDbName(dbname, conf); try { // retrieve tables from remote database Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); List<Table> tables = DataConnectorProviderFactory.getDataConnectorProvider(db).getTables(null); // filtered out undesired tables if (tableNames != null) { tables = filterTablesByName(tables, tableNames); } // set remote tables' local hive database reference for (Table table : tables) { table.setDbName(dbname); } return FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, tables); } catch (Exception e) { LOG.warn("Unexpected exception while getting table(s) in remote database " + dbname , e); return new ArrayList<Table>(); } } private List<Table> getTableObjectsInternal(String catName, String dbName, List<String> tableNames, ClientCapabilities capabilities, GetProjectionsSpec projectionsSpec, String tablePattern) throws MetaException, InvalidOperationException, UnknownDBException { if (isInTest) { assertClientHasCapability(capabilities, ClientCapability.TEST_CAPABILITY, "Hive tests", "get_table_objects_by_name_req"); } if (projectionsSpec != null) { if (!projectionsSpec.isSetFieldList() && (projectionsSpec.isSetIncludeParamKeyPattern() || projectionsSpec.isSetExcludeParamKeyPattern())) { throw new InvalidOperationException("Include and Exclude Param key are not supported."); } } List<Table> tables = new ArrayList<>(); startMultiTableFunction("get_multi_table", dbName, tableNames); Exception ex = null; int tableBatchSize = MetastoreConf.getIntVar(conf, ConfVars.BATCH_RETRIEVE_MAX); try { if (dbName == null || dbName.isEmpty()) { throw new UnknownDBException("DB name is null or empty"); } RawStore ms = getMS(); if(tablePattern != null){ tables = ms.getTableObjectsByName(catName, dbName, tableNames, projectionsSpec, tablePattern); }else { if (tableNames == null) { throw new InvalidOperationException(dbName + " cannot find null tables"); } // The list of table names could contain duplicates. RawStore.getTableObjectsByName() // only guarantees returning no duplicate table objects in one batch. If we need // to break into multiple batches, remove duplicates first. List<String> distinctTableNames = tableNames; if (distinctTableNames.size() > tableBatchSize) { List<String> lowercaseTableNames = new ArrayList<>(); for (String tableName : tableNames) { lowercaseTableNames.add(org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(tableName)); } distinctTableNames = new ArrayList<>(new HashSet<>(lowercaseTableNames)); } int startIndex = 0; // Retrieve the tables from the metastore in batches. 
Some databases like // Oracle cannot have over 1000 expressions in a in-list while (startIndex < distinctTableNames.size()) { int endIndex = Math.min(startIndex + tableBatchSize, distinctTableNames.size()); tables.addAll(ms.getTableObjectsByName(catName, dbName, distinctTableNames.subList( startIndex, endIndex), projectionsSpec, tablePattern)); startIndex = endIndex; } } for (Table t : tables) { if (t.getParameters() != null && MetaStoreUtils.isInsertOnlyTableParam(t.getParameters())) { assertClientHasCapability(capabilities, ClientCapability.INSERT_ONLY_TABLES, "insert-only tables", "get_table_req"); } } tables = FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, tables); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidOperationException.class, UnknownDBException.class) .defaultMetaException(); } finally { endFunction("get_multi_table", tables != null, ex, join(tableNames, ",")); } return tables; } @Override public Materialization get_materialization_invalidation_info(final CreationMetadata cm, final String validTxnList) throws MetaException { return getTxnHandler().getMaterializationInvalidationInfo(cm, validTxnList); } @Override public void update_creation_metadata(String catName, final String dbName, final String tableName, CreationMetadata cm) throws MetaException { getMS().updateCreationMetadata(catName, dbName, tableName, cm); } private void assertClientHasCapability(ClientCapabilities client, ClientCapability value, String what, String call) throws MetaException { if (!doesClientHaveCapability(client, value)) { throw new MetaException("Your client does not appear to support " + what + ". To skip" + " capability checks, please set " + ConfVars.CAPABILITY_CHECK.toString() + " to false. This setting can be set globally, or on the client for the current" + " metastore session. Note that this may lead to incorrect results, data loss," + " undefined behavior, etc. if your client is actually incompatible. 
You can also" + " specify custom client capabilities via " + call + " API."); } } private boolean doesClientHaveCapability(ClientCapabilities client, ClientCapability value) { if (!MetastoreConf.getBoolVar(getConf(), ConfVars.CAPABILITY_CHECK)) { return true; } return (client != null && client.isSetValues() && client.getValues().contains(value)); } @Override public List<String> get_table_names_by_filter( final String dbName, final String filter, final short maxTables) throws MetaException, InvalidOperationException, UnknownDBException { List<String> tables = null; startFunction("get_table_names_by_filter", ": db = " + dbName + ", filter = " + filter); Exception ex = null; String[] parsedDbName = parseDbName(dbName, conf); try { if (parsedDbName[CAT_NAME] == null || parsedDbName[CAT_NAME].isEmpty() || parsedDbName[DB_NAME] == null || parsedDbName[DB_NAME].isEmpty()) { throw new UnknownDBException("DB name is null or empty"); } if (filter == null) { throw new InvalidOperationException(filter + " cannot apply null filter"); } tables = getMS().listTableNamesByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], filter, maxTables); tables = FilterUtils.filterTableNamesIfEnabled( isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tables); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidOperationException.class, UnknownDBException.class) .defaultMetaException(); } finally { endFunction("get_table_names_by_filter", tables != null, ex, join(tables, ",")); } return tables; } private Partition append_partition_common(RawStore ms, String catName, String dbName, String tableName, List<String> part_vals, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException, NoSuchObjectException { Partition part = new Partition(); boolean success = false, madeDir = false; Path partLocation = null; Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; try { ms.openTransaction(); part.setCatName(catName); part.setDbName(dbName); part.setTableName(tableName); part.setValues(part_vals); MetaStoreServerUtils.validatePartitionNameCharacters(part_vals, partitionValidationPattern); tbl = ms.getTable(part.getCatName(), part.getDbName(), part.getTableName(), null); if (tbl == null) { throw new InvalidObjectException( "Unable to add partition because table or database do not exist"); } if (tbl.getSd().getLocation() == null) { throw new MetaException( "Cannot append a partition to a view"); } db = get_database_core(catName, dbName); firePreEvent(new PreAddPartitionEvent(tbl, part, this)); part.setSd(tbl.getSd().deepCopy()); partLocation = new Path(tbl.getSd().getLocation(), Warehouse .makePartName(tbl.getPartitionKeys(), part_vals)); part.getSd().setLocation(partLocation.toString()); Partition old_part; try { old_part = ms.getPartition(part.getCatName(), part.getDbName(), part .getTableName(), part.getValues()); } catch (NoSuchObjectException e) { // this means there is no existing partition old_part = null; } if (old_part != null) { throw new AlreadyExistsException("Partition already exists:" + part); } if (!wh.isDir(partLocation)) { if (!wh.mkdirs(partLocation)) { throw new MetaException(partLocation + " is not a directory or unable to create one"); } madeDir = true; } // set create time long time = System.currentTimeMillis() / 1000; part.setCreateTime((int) time); part.putToParameters(hive_metastoreConstants.DDL_TIME, 
Long.toString(time)); if (canUpdateStats(tbl)) { MetaStoreServerUtils.updatePartitionStatsFast(part, tbl, wh, madeDir, false, envContext, true); } if (ms.addPartition(part)) { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, part, true, this), envContext); } success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); if (madeDir) { wh.deleteDir(partLocation, true, false, ReplChangeManager.shouldEnableCm(db, tbl)); } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, part, success, this), envContext, transactionalListenerResponses, ms); } } return part; } private void firePreEvent(PreEventContext event) throws MetaException { for (MetaStorePreEventListener listener : preListeners) { try { listener.onEvent(event); } catch (NoSuchObjectException e) { throw new MetaException(e.getMessage()); } catch (InvalidOperationException e) { throw new MetaException(e.getMessage()); } } } @Override public Partition append_partition(final String dbName, final String tableName, final List<String> part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException { return append_partition_with_environment_context(dbName, tableName, part_vals, null); } @Override public Partition append_partition_with_environment_context(final String dbName, final String tableName, final List<String> part_vals, final EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException { if (part_vals == null || part_vals.isEmpty()) { throw new MetaException("The partition values must not be null or empty."); } String[] parsedDbName = parseDbName(dbName, conf); startPartitionFunction("append_partition", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, part_vals); if (LOG.isDebugEnabled()) { for (String part : part_vals) { LOG.debug(part); } } Partition ret = null; Exception ex = null; try { ret = append_partition_common(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, part_vals, envContext); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("append_partition", ret != null, ex, tableName); } return ret; } private static class PartValEqWrapperLite { List<String> values; String location; PartValEqWrapperLite(Partition partition) { this.values = partition.isSetValues()? partition.getValues() : null; if (partition.getSd() != null) { this.location = partition.getSd().getLocation(); } } @Override public int hashCode() { return values == null ? 
0 : values.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || !(obj instanceof PartValEqWrapperLite)) { return false; } List<String> lhsValues = this.values; List<String> rhsValues = ((PartValEqWrapperLite)obj).values; if (lhsValues == null || rhsValues == null) { return lhsValues == rhsValues; } if (lhsValues.size() != rhsValues.size()) { return false; } for (int i=0; i<lhsValues.size(); ++i) { String lhsValue = lhsValues.get(i); String rhsValue = rhsValues.get(i); if ((lhsValue == null && rhsValue != null) || (lhsValue != null && !lhsValue.equals(rhsValue))) { return false; } } return true; } } private List<Partition> add_partitions_core(final RawStore ms, String catName, String dbName, String tblName, List<Partition> parts, final boolean ifNotExists) throws TException { logAndAudit("add_partitions"); boolean success = false; // Ensures that the list doesn't have dups, and keeps track of directories we have created. final Map<PartValEqWrapperLite, Boolean> addedPartitions = new ConcurrentHashMap<>(); final List<Partition> newParts = new ArrayList<>(); final List<Partition> existingParts = new ArrayList<>(); Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; List<ColumnStatistics> partsColStats = new ArrayList<>(parts.size()); List<Long> partsWriteIds = new ArrayList<>(parts.size()); throwUnsupportedExceptionIfRemoteDB(dbName, "add_partitions"); Lock tableLock = getTableLockFor(dbName, tblName); tableLock.lock(); try { ms.openTransaction(); tbl = ms.getTable(catName, dbName, tblName, null); if (tbl == null) { throw new InvalidObjectException("Unable to add partitions because " + TableName.getQualified(catName, dbName, tblName) + " does not exist"); } db = ms.getDatabase(catName, dbName); if (!parts.isEmpty()) { firePreEvent(new PreAddPartitionEvent(tbl, parts, this)); } Set<PartValEqWrapperLite> partsToAdd = new HashSet<>(parts.size()); List<Partition> partitionsToAdd = new ArrayList<>(parts.size()); List<FieldSchema> partitionKeys = tbl.getPartitionKeys(); for (final Partition part : parts) { // Collect partition column stats to be updated if present. Partition objects passed down // here at the time of replication may have statistics in them, which is required to be // updated in the metadata. But we don't want it to be part of the Partition object when // it's being created or altered, lest it becomes part of the notification event. if (part.isSetColStats()) { partsColStats.add(part.getColStats()); part.unsetColStats(); partsWriteIds.add(part.getWriteId()); } // Iterate through the partitions and validate them. If one of the partitions is // incorrect, an exception will be thrown before the threads which create the partition // folders are submitted. This way we can be sure that no partition and no partition // folder will be created if the list contains an invalid partition. if (validatePartition(part, catName, tblName, dbName, partsToAdd, ms, ifNotExists, partitionKeys)) { partitionsToAdd.add(part); } else { existingParts.add(part); } } newParts.addAll(createPartitionFolders(partitionsToAdd, tbl, addedPartitions)); if (!newParts.isEmpty()) { ms.addPartitions(catName, dbName, tblName, newParts); } // Notification is generated for newly created partitions only. The subset of partitions // that already exist (existingParts), will not generate notifications. 
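      // Sketch of the event fan-out below (hypothetical input): for parts = [p1 (new),
      // p2 (already existing)] with ifNotExists = true, transactional listeners receive a single
      // ADD_PARTITION event with status=true carrying [p1], while regular listeners additionally
      // receive an ADD_PARTITION event with status=false carrying [p2].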
if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, newParts, true, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, newParts, true, this), null, transactionalListenerResponses, ms); if (!existingParts.isEmpty()) { // The request has succeeded but we failed to add these partitions. MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, existingParts, false, this), null, null, ms); } } // Update partition column statistics if available. We need a valid writeId list to // update column statistics for a transactional table. But during bootstrap replication, // where we use this feature, we do not have a valid writeId list which was used to // update the stats. But we know for sure that the writeId associated with the stats was // valid then (otherwise stats update would have failed on the source). So, craft a valid // transaction list with only that writeId and use it to update the stats. int cnt = 0; for (ColumnStatistics partColStats: partsColStats) { long writeId = partsWriteIds.get(cnt++); String validWriteIds = null; if (writeId > 0) { ValidWriteIdList validWriteIdList = new ValidReaderWriteIdList(TableName.getDbTable(tbl.getDbName(), tbl.getTableName()), new long[0], new BitSet(), writeId); validWriteIds = validWriteIdList.toString(); } updatePartitonColStatsInternal(tbl, partColStats, validWriteIds, writeId); } success = ms.commitTransaction(); } finally { try { if (!success) { ms.rollbackTransaction(); cleanupPartitionFolders(addedPartitions, db); if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, parts, false, this), null, null, ms); } } } finally { tableLock.unlock(); } } return newParts; } private Lock getTableLockFor(String dbName, String tblName) { return tablelocks.get(dbName + "." + tblName); } /** * Remove the newly created partition folders. The values in the addedPartitions map indicates * whether or not the location of the partition was newly created. If the value is false, the * partition folder will not be removed. * @param addedPartitions * @throws MetaException * @throws IllegalArgumentException */ private void cleanupPartitionFolders(final Map<PartValEqWrapperLite, Boolean> addedPartitions, Database db) throws MetaException, IllegalArgumentException { for (Map.Entry<PartValEqWrapperLite, Boolean> e : addedPartitions.entrySet()) { if (e.getValue()) { // we just created this directory - it's not a case of pre-creation, so we nuke. wh.deleteDir(new Path(e.getKey().location), true, db); } } } /** * Validate a partition before creating it. The validation checks * <ul> * <li>if the database and table names set in the partition are not null and they are matching * with the expected values set in the tblName and dbName parameters.</li> * <li>if the partition values are set.</li> * <li>if none of the partition values is null.</li> * <li>if the partition values are matching with the pattern set in the * 'metastore.partition.name.whitelist.pattern' configuration property.</li> * <li>if the partition doesn't already exist. If the partition already exists, an exception * will be thrown if the ifNotExists parameter is false, otherwise it will be just ignored.</li> * <li>if the partsToAdd set doesn't contain the partition. 
The partsToAdd set contains the * partitions which are already validated. If the set contains the current partition, it means * that the partition is tried to be added multiple times in the same batch. Please note that * the set will be updated with the current partition if the validation was successful.</li> * </ul> * @param part * @param catName * @param tblName * @param dbName * @param partsToAdd * @param ms * @param ifNotExists * @return * @throws MetaException * @throws TException */ private boolean validatePartition(final Partition part, final String catName, final String tblName, final String dbName, final Set<PartValEqWrapperLite> partsToAdd, final RawStore ms, final boolean ifNotExists, List<FieldSchema> partitionKeys) throws MetaException, TException { if (part.getDbName() == null || part.getTableName() == null) { throw new MetaException("The database and table name must be set in the partition."); } if (!part.getTableName().equalsIgnoreCase(tblName) || !part.getDbName().equalsIgnoreCase(dbName)) { String errorMsg = String.format( "Partition does not belong to target table %s. It belongs to the table %s.%s : %s", TableName.getQualified(catName, dbName, tblName), part.getDbName(), part.getTableName(), part.toString()); throw new MetaException(errorMsg); } if (part.getValues() == null || part.getValues().isEmpty()) { throw new MetaException("The partition values cannot be null or empty."); } if (part.getValues().contains(null)) { throw new MetaException("Partition value cannot be null."); } boolean shouldAdd = startAddPartition(ms, part, partitionKeys, ifNotExists); if (!shouldAdd) { LOG.info("Not adding partition {} as it already exists", part); return false; } if (!partsToAdd.add(new PartValEqWrapperLite(part))) { // Technically, for ifNotExists case, we could insert one and discard the other // because the first one now "exists", but it seems better to report the problem // upstream as such a command doesn't make sense. throw new MetaException("Duplicate partitions in the list: " + part); } return true; } /** * Create the location folders for the partitions. For each partition a separate thread will be * started to create the folder. The method will wait until all threads are finished and returns * the partitions whose folders were created successfully. If an error occurs during the * execution of a thread, a MetaException will be thrown. 
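   * <p>Sketch of the failure handling (hypothetical three-partition batch): all three folder
   * creations are submitted to the shared thread pool; if the second task fails, the
   * failureOccurred flag is set, the remaining futures are still awaited (tasks starting after
   * the flag is set return without creating anything), and the MetaException is thrown only
   * once every task has finished, so the caller can clean up exactly the folders that exist.
   *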
* @param partitionsToAdd * @param table * @param addedPartitions * @return * @throws MetaException */ private List<Partition> createPartitionFolders(final List<Partition> partitionsToAdd, final Table table, final Map<PartValEqWrapperLite, Boolean> addedPartitions) throws MetaException { final AtomicBoolean failureOccurred = new AtomicBoolean(false); final List<Future<Partition>> partFutures = new ArrayList<>(partitionsToAdd.size()); final Map<PartValEqWrapperLite, Boolean> addedParts = new ConcurrentHashMap<>(); final UserGroupInformation ugi; try { ugi = UserGroupInformation.getCurrentUser(); } catch (IOException e) { throw new RuntimeException(e); } for (final Partition partition : partitionsToAdd) { initializePartitionParameters(table, partition); partFutures.add(threadPool.submit(() -> { if (failureOccurred.get()) { return null; } ugi.doAs((PrivilegedExceptionAction<Partition>) () -> { try { boolean madeDir = createLocationForAddedPartition(table, partition); addedParts.put(new PartValEqWrapperLite(partition), madeDir); initializeAddedPartition(table, partition, madeDir, null); } catch (MetaException e) { throw new IOException(e.getMessage(), e); } return null; }); return partition; })); } List<Partition> newParts = new ArrayList<>(partitionsToAdd.size()); String errorMessage = null; for (Future<Partition> partFuture : partFutures) { try { Partition part = partFuture.get(); if (part != null && !failureOccurred.get()) { newParts.add(part); } } catch (ExecutionException e) { // If an exception is thrown in the execution of a task, set the failureOccurred flag to // true. This flag is visible in the tasks and if its value is true, the partition // folders won't be created. // Then iterate through the remaining tasks and wait for them to finish. The tasks which // are started before the flag got set will then finish creating the partition folders. // The tasks which are started after the flag got set, won't create the partition // folders, to avoid unnecessary work. // This way it is sure that all tasks are finished, when entering the finally part where // the partition folders are cleaned up. It won't happen that a task is still running // when cleaning up the folders, so it is sure we won't have leftover folders. // Canceling the other tasks would be also an option but during testing it turned out // that it is not a trustworthy solution to avoid leftover folders. 
failureOccurred.compareAndSet(false, true); errorMessage = e.getMessage(); } catch (InterruptedException e) { failureOccurred.compareAndSet(false, true); errorMessage = e.getMessage(); // Restore interruption status of the corresponding thread Thread.currentThread().interrupt(); } } addedPartitions.putAll(addedParts); if (failureOccurred.get()) { throw new MetaException(errorMessage); } return newParts; } @Override public AddPartitionsResult add_partitions_req(AddPartitionsRequest request) throws TException { AddPartitionsResult result = new AddPartitionsResult(); if (request.getParts().isEmpty()) { return result; } try { if (!request.isSetCatName()) { request.setCatName(getDefaultCatalog(conf)); } // Make sure all of the partitions have the catalog set as well request.getParts().forEach(p -> { if (!p.isSetCatName()) { p.setCatName(getDefaultCatalog(conf)); } }); List<Partition> parts = add_partitions_core(getMS(), request.getCatName(), request.getDbName(), request.getTblName(), request.getParts(), request.isIfNotExists()); if (request.isNeedResult()) { result.setPartitions(parts); } } catch (Exception e) { throw handleException(e).throwIfInstance(TException.class).defaultMetaException(); } return result; } @Override public int add_partitions(final List<Partition> parts) throws MetaException, InvalidObjectException, AlreadyExistsException { startFunction("add_partition"); if (parts == null) { throw new MetaException("Partition list cannot be null."); } if (parts.isEmpty()) { return 0; } Integer ret = null; Exception ex = null; try { // Old API assumed all partitions belong to the same table; keep the same assumption if (!parts.get(0).isSetCatName()) { String defaultCat = getDefaultCatalog(conf); for (Partition p : parts) { p.setCatName(defaultCat); } } ret = add_partitions_core(getMS(), parts.get(0).getCatName(), parts.get(0).getDbName(), parts.get(0).getTableName(), parts, false).size(); assert ret == parts.size(); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { String tableName = parts.get(0).getTableName(); endFunction("add_partition", ret != null, ex, tableName); } return ret; } @Override public int add_partitions_pspec(final List<PartitionSpec> partSpecs) throws TException { logAndAudit("add_partitions_pspec"); if (partSpecs.isEmpty()) { return 0; } String dbName = partSpecs.get(0).getDbName(); String tableName = partSpecs.get(0).getTableName(); // If the catalog name isn't set, we need to go through and set it. String catName; if (!partSpecs.get(0).isSetCatName()) { catName = getDefaultCatalog(conf); partSpecs.forEach(ps -> ps.setCatName(catName)); } else { catName = partSpecs.get(0).getCatName(); } return add_partitions_pspec_core(getMS(), catName, dbName, tableName, partSpecs, false); } private int add_partitions_pspec_core(RawStore ms, String catName, String dbName, String tblName, List<PartitionSpec> partSpecs, boolean ifNotExists) throws TException { boolean success = false; if (dbName == null || tblName == null) { throw new MetaException("The database and table name cannot be null."); } // Ensures that the list doesn't have dups, and keeps track of directories we have created. 
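    // Note: each key is a PartValEqWrapperLite (partition values + SD location) and the value
    // records whether this call actually created the directory; only entries whose value is
    // true are removed again by cleanupPartitionFolders if the transaction rolls back.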
final Map<PartValEqWrapperLite, Boolean> addedPartitions = new ConcurrentHashMap<>(); PartitionSpecProxy partitionSpecProxy = PartitionSpecProxy.Factory.get(partSpecs); final PartitionSpecProxy.PartitionIterator partitionIterator = partitionSpecProxy .getPartitionIterator(); Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); Database db = null; Lock tableLock = getTableLockFor(dbName, tblName); tableLock.lock(); try { ms.openTransaction(); try { db = ms.getDatabase(catName, dbName); } catch (NoSuchObjectException notExists) { throw new InvalidObjectException("Unable to add partitions because " + "database or table " + dbName + "." + tblName + " does not exist"); } if (db.getType() == DatabaseType.REMOTE) { throw new MetaException("Operation add_partitions_pspec not supported on tables in REMOTE database"); } tbl = ms.getTable(catName, dbName, tblName, null); if (tbl == null) { throw new InvalidObjectException("Unable to add partitions because " + "database or table " + dbName + "." + tblName + " does not exist"); } firePreEvent(new PreAddPartitionEvent(tbl, partitionSpecProxy, this)); Set<PartValEqWrapperLite> partsToAdd = new HashSet<>(partitionSpecProxy.size()); List<Partition> partitionsToAdd = new ArrayList<>(partitionSpecProxy.size()); List<FieldSchema> partitionKeys = tbl.getPartitionKeys(); while (partitionIterator.hasNext()) { // Iterate through the partitions and validate them. If one of the partitions is // incorrect, an exception will be thrown before the threads which create the partition // folders are submitted. This way we can be sure that no partition or partition folder // will be created if the list contains an invalid partition. final Partition part = partitionIterator.getCurrent(); if (validatePartition(part, catName, tblName, dbName, partsToAdd, ms, ifNotExists, partitionKeys)) { partitionsToAdd.add(part); } partitionIterator.next(); } createPartitionFolders(partitionsToAdd, tbl, addedPartitions); ms.addPartitions(catName, dbName, tblName, partitionSpecProxy, ifNotExists); if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, partitionSpecProxy, true, this)); } success = ms.commitTransaction(); return addedPartitions.size(); } finally { try { if (!success) { ms.rollbackTransaction(); cleanupPartitionFolders(addedPartitions, db); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, partitionSpecProxy, true, this), null, transactionalListenerResponses, ms); } } finally { tableLock.unlock(); } } } private boolean startAddPartition( RawStore ms, Partition part, List<FieldSchema> partitionKeys, boolean ifNotExists) throws TException { MetaStoreServerUtils.validatePartitionNameCharacters(part.getValues(), partitionValidationPattern); boolean doesExist = ms.doesPartitionExist(part.getCatName(), part.getDbName(), part.getTableName(), partitionKeys, part.getValues()); if (doesExist && !ifNotExists) { throw new AlreadyExistsException("Partition already exists: " + part); } return !doesExist; } /** * Handles the location for a partition being created. * @param tbl Table. * @param part Partition. * @return Whether the partition SD location is set to a newly created directory. 
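   * <p>Illustrative example with hypothetical names: for a table located at
   * {@code /warehouse/sales}, partitioned by {@code (ds, country)}, and a partition with values
   * {@code ["2024-01-01", "US"]} whose SD carries no location of its own, the derived location
   * is {@code /warehouse/sales/ds=2024-01-01/country=US}.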
   */
  private boolean createLocationForAddedPartition(
      final Table tbl, final Partition part) throws MetaException {
    Path partLocation = null;
    String partLocationStr = null;
    if (part.getSd() != null) {
      partLocationStr = part.getSd().getLocation();
    }

    if (partLocationStr == null || partLocationStr.isEmpty()) {
      // set default location if not specified and this is
      // a physical table partition (not a view)
      if (tbl.getSd().getLocation() != null) {
        partLocation = new Path(tbl.getSd().getLocation(), Warehouse
            .makePartName(tbl.getPartitionKeys(), part.getValues()));
      }
    } else {
      if (tbl.getSd().getLocation() == null) {
        throw new MetaException("Cannot specify location for a view partition");
      }
      partLocation = wh.getDnsPath(new Path(partLocationStr));
    }

    boolean result = false;
    if (partLocation != null) {
      part.getSd().setLocation(partLocation.toString());

      // Check to see if the directory already exists before calling
      // mkdirs() because if the file system is read-only, mkdirs will
      // throw an exception even if the directory already exists.
      if (!wh.isDir(partLocation)) {
        if (!wh.mkdirs(partLocation)) {
          throw new MetaException(partLocation
              + " is not a directory or unable to create one");
        }
        result = true;
      }
    }
    return result;
  }

  /**
   * Verifies whether stats should be updated while altering partition(s).
   * For the following three cases HMS will not update partition stats:
   * 1) Table property 'DO_NOT_UPDATE_STATS' = True
   * 2) HMS configuration property 'STATS_AUTO_GATHER' = False
   * 3) The table is a view
   */
  private boolean canUpdateStats(Table tbl) {
    Map<String,String> tblParams = tbl.getParameters();
    boolean updateStatsTbl = true;
    if ((tblParams != null) && tblParams.containsKey(StatsSetupConst.DO_NOT_UPDATE_STATS)) {
      updateStatsTbl = !Boolean.valueOf(tblParams.get(StatsSetupConst.DO_NOT_UPDATE_STATS));
    }
    if (!MetastoreConf.getBoolVar(conf, ConfVars.STATS_AUTO_GATHER) ||
        MetaStoreUtils.isView(tbl) ||
        !updateStatsTbl) {
      return false;
    }
    return true;
  }

  private void initializeAddedPartition(final Table tbl, final Partition part, boolean madeDir,
      EnvironmentContext environmentContext) throws MetaException {
    initializeAddedPartition(tbl,
        new PartitionSpecProxy.SimplePartitionWrapperIterator(part), madeDir, environmentContext);
  }

  private void initializeAddedPartition(
      final Table tbl, final PartitionSpecProxy.PartitionIterator part, boolean madeDir,
      EnvironmentContext environmentContext) throws MetaException {
    if (canUpdateStats(tbl)) {
      MetaStoreServerUtils.updatePartitionStatsFast(part, tbl, wh, madeDir, false,
          environmentContext, true);
    }

    // set create time
    long time = System.currentTimeMillis() / 1000;
    part.setCreateTime((int) time);
    if (part.getParameters() == null ||
        part.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) {
      part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time));
    }
  }

  private void initializePartitionParameters(final Table tbl, final Partition part)
      throws MetaException {
    initializePartitionParameters(tbl,
        new PartitionSpecProxy.SimplePartitionWrapperIterator(part));
  }

  private void initializePartitionParameters(final Table tbl,
      final PartitionSpecProxy.PartitionIterator part) throws MetaException {
    // Inherit table properties into partition properties.
    Map<String, String> tblParams = tbl.getParameters();
    String inheritProps = MetastoreConf.getVar(conf, ConfVars.PART_INHERIT_TBL_PROPS).trim();
    // Default value is empty string in which case no properties will be inherited.
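    // Illustrative example with a hypothetical configuration: if the inherit list is set to
    // "owner,retention" and the table parameters are {owner=etl, retention=30, foo=bar}, only
    // owner and retention are copied onto the new partition.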
// * implies all properties needs to be inherited Set<String> inheritKeys = new HashSet<>(Arrays.asList(inheritProps.split(","))); if (inheritKeys.contains("*")) { inheritKeys = tblParams.keySet(); } for (String key : inheritKeys) { String paramVal = tblParams.get(key); if (null != paramVal) { // add the property only if it exists in table properties part.putToParameters(key, paramVal); } } } private Partition add_partition_core(final RawStore ms, final Partition part, final EnvironmentContext envContext) throws TException { boolean success = false; Table tbl = null; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); if (!part.isSetCatName()) { part.setCatName(getDefaultCatalog(conf)); } try { ms.openTransaction(); tbl = ms.getTable(part.getCatName(), part.getDbName(), part.getTableName(), null); if (tbl == null) { throw new InvalidObjectException( "Unable to add partition because table or database do not exist"); } firePreEvent(new PreAddPartitionEvent(tbl, part, this)); if (part.getValues() == null || part.getValues().isEmpty()) { throw new MetaException("The partition values cannot be null or empty."); } boolean shouldAdd = startAddPartition(ms, part, tbl.getPartitionKeys(), false); assert shouldAdd; // start would throw if it already existed here boolean madeDir = createLocationForAddedPartition(tbl, part); try { initializeAddedPartition(tbl, part, madeDir, envContext); initializePartitionParameters(tbl, part); success = ms.addPartition(part); } finally { if (!success && madeDir) { wh.deleteDir(new Path(part.getSd().getLocation()), true, false, ReplChangeManager.shouldEnableCm(ms.getDatabase(part.getCatName(), part.getDbName()), tbl)); } } // Setting success to false to make sure that if the listener fails, rollback happens. success = false; if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, Arrays.asList(part), true, this), envContext); } // we proceed only if we'd actually succeeded anyway, otherwise, // we'd have thrown an exception success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, new AddPartitionEvent(tbl, Arrays.asList(part), success, this), envContext, transactionalListenerResponses, ms); } } return part; } @Override public Partition add_partition(final Partition part) throws InvalidObjectException, AlreadyExistsException, MetaException { return add_partition_with_environment_context(part, null); } @Override public Partition add_partition_with_environment_context( final Partition part, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException { if (part == null) { throw new MetaException("Partition cannot be null."); } startTableFunction("add_partition", part.getCatName(), part.getDbName(), part.getTableName()); Partition ret = null; Exception ex = null; try { ret = add_partition_core(getMS(), part, envContext); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(MetaException.class, InvalidObjectException.class, AlreadyExistsException.class) .defaultMetaException(); } finally { endFunction("add_partition", ret != null, ex, part != null ? 
part.getTableName(): null); } return ret; } @Override public Partition exchange_partition(Map<String, String> partitionSpecs, String sourceDbName, String sourceTableName, String destDbName, String destTableName) throws TException { exchange_partitions(partitionSpecs, sourceDbName, sourceTableName, destDbName, destTableName); // Wouldn't it make more sense to return the first element of the list returned by the // previous call? return new Partition(); } @Override public List<Partition> exchange_partitions(Map<String, String> partitionSpecs, String sourceDbName, String sourceTableName, String destDbName, String destTableName) throws TException { String[] parsedDestDbName = parseDbName(destDbName, conf); String[] parsedSourceDbName = parseDbName(sourceDbName, conf); // No need to check catalog for null as parseDbName() will never return null for the catalog. if (partitionSpecs == null || parsedSourceDbName[DB_NAME] == null || sourceTableName == null || parsedDestDbName[DB_NAME] == null || destTableName == null) { throw new MetaException("The DB and table name for the source and destination tables," + " and the partition specs must not be null."); } if (!parsedDestDbName[CAT_NAME].equals(parsedSourceDbName[CAT_NAME])) { throw new MetaException("You cannot move a partition across catalogs"); } boolean success = false; boolean pathCreated = false; RawStore ms = getMS(); ms.openTransaction(); Table destinationTable = ms.getTable( parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName, null); if (destinationTable == null) { throw new MetaException( "The destination table " + TableName.getQualified(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName) + " not found"); } Table sourceTable = ms.getTable( parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName, null); if (sourceTable == null) { throw new MetaException("The source table " + TableName.getQualified(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName) + " not found"); } List<String> partVals = MetaStoreUtils.getPvals(sourceTable.getPartitionKeys(), partitionSpecs); List<String> partValsPresent = new ArrayList<> (); List<FieldSchema> partitionKeysPresent = new ArrayList<> (); int i = 0; for (FieldSchema fs: sourceTable.getPartitionKeys()) { String partVal = partVals.get(i); if (partVal != null && !partVal.equals("")) { partValsPresent.add(partVal); partitionKeysPresent.add(fs); } i++; } // Passed the unparsed DB name here, as get_partitions_ps expects to parse it List<Partition> partitionsToExchange = get_partitions_ps(sourceDbName, sourceTableName, partVals, (short)-1); if (partitionsToExchange == null || partitionsToExchange.isEmpty()) { throw new MetaException("No partition is found with the values " + partitionSpecs + " for the table " + sourceTableName); } boolean sameColumns = MetaStoreUtils.compareFieldColumns( sourceTable.getSd().getCols(), destinationTable.getSd().getCols()); boolean samePartitions = MetaStoreUtils.compareFieldColumns( sourceTable.getPartitionKeys(), destinationTable.getPartitionKeys()); if (!sameColumns || !samePartitions) { throw new MetaException("The tables have different schemas." 
+ " Their partitions cannot be exchanged."); } Path sourcePath = new Path(sourceTable.getSd().getLocation(), Warehouse.makePartName(partitionKeysPresent, partValsPresent)); Path destPath = new Path(destinationTable.getSd().getLocation(), Warehouse.makePartName(partitionKeysPresent, partValsPresent)); List<Partition> destPartitions = new ArrayList<>(); Map<String, String> transactionalListenerResponsesForAddPartition = Collections.emptyMap(); List<Map<String, String>> transactionalListenerResponsesForDropPartition = Lists.newArrayListWithCapacity(partitionsToExchange.size()); // Check if any of the partitions already exists in destTable. List<String> destPartitionNames = ms.listPartitionNames(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName, (short) -1); if (destPartitionNames != null && !destPartitionNames.isEmpty()) { for (Partition partition : partitionsToExchange) { String partToExchangeName = Warehouse.makePartName(destinationTable.getPartitionKeys(), partition.getValues()); if (destPartitionNames.contains(partToExchangeName)) { throw new MetaException("The partition " + partToExchangeName + " already exists in the table " + destTableName); } } } Database srcDb = ms.getDatabase(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME]); Database destDb = ms.getDatabase(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME]); if (!HiveMetaStore.isRenameAllowed(srcDb, destDb)) { throw new MetaException("Exchange partition not allowed for " + TableName.getQualified(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName) + " Dest db : " + destDbName); } try { for (Partition partition: partitionsToExchange) { Partition destPartition = new Partition(partition); destPartition.setDbName(parsedDestDbName[DB_NAME]); destPartition.setTableName(destinationTable.getTableName()); Path destPartitionPath = new Path(destinationTable.getSd().getLocation(), Warehouse.makePartName(destinationTable.getPartitionKeys(), partition.getValues())); destPartition.getSd().setLocation(destPartitionPath.toString()); ms.addPartition(destPartition); destPartitions.add(destPartition); ms.dropPartition(parsedSourceDbName[CAT_NAME], partition.getDbName(), sourceTable.getTableName(), partition.getValues()); } Path destParentPath = destPath.getParent(); if (!wh.isDir(destParentPath)) { if (!wh.mkdirs(destParentPath)) { throw new MetaException("Unable to create path " + destParentPath); } } /* * TODO: Use the hard link feature of hdfs * once https://issues.apache.org/jira/browse/HDFS-3370 is done */ pathCreated = wh.renameDir(sourcePath, destPath, false); // Setting success to false to make sure that if the listener fails, rollback happens. 
success = false; if (!transactionalListeners.isEmpty()) { transactionalListenerResponsesForAddPartition = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_PARTITION, new AddPartitionEvent(destinationTable, destPartitions, true, this)); for (Partition partition : partitionsToExchange) { DropPartitionEvent dropPartitionEvent = new DropPartitionEvent(sourceTable, partition, true, true, this); transactionalListenerResponsesForDropPartition.add( MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_PARTITION, dropPartitionEvent)); } } success = ms.commitTransaction(); return destPartitions; } finally { if (!success || !pathCreated) { ms.rollbackTransaction(); if (pathCreated) { wh.renameDir(destPath, sourcePath, false); } } if (!listeners.isEmpty()) { AddPartitionEvent addPartitionEvent = new AddPartitionEvent(destinationTable, destPartitions, success, this); MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_PARTITION, addPartitionEvent, null, transactionalListenerResponsesForAddPartition, ms); i = 0; for (Partition partition : partitionsToExchange) { DropPartitionEvent dropPartitionEvent = new DropPartitionEvent(sourceTable, partition, success, true, this); Map<String, String> parameters = (transactionalListenerResponsesForDropPartition.size() > i) ? transactionalListenerResponsesForDropPartition.get(i) : null; MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_PARTITION, dropPartitionEvent, null, parameters, ms); i++; } } } } private boolean drop_partition_common(RawStore ms, String catName, String db_name, String tbl_name, List<String> part_vals, final boolean deleteData, final EnvironmentContext envContext) throws MetaException, NoSuchObjectException, IOException, InvalidObjectException, InvalidInputException { boolean success = false; Path partPath = null; Table tbl = null; Partition part = null; boolean isArchived = false; Path archiveParentDir = null; boolean mustPurge = false; boolean tableDataShouldBeDeleted = false; boolean needsCm = false; Map<String, String> transactionalListenerResponses = Collections.emptyMap(); if (db_name == null) { throw new MetaException("The DB name cannot be null."); } if (tbl_name == null) { throw new MetaException("The table name cannot be null."); } if (part_vals == null) { throw new MetaException("The partition values cannot be null."); } try { ms.openTransaction(); part = ms.getPartition(catName, db_name, tbl_name, part_vals); GetTableRequest request = new GetTableRequest(db_name,tbl_name); request.setCatName(catName); tbl = get_table_core(request); tableDataShouldBeDeleted = checkTableDataShouldBeDeleted(tbl, deleteData); firePreEvent(new PreDropPartitionEvent(tbl, part, deleteData, this)); mustPurge = isMustPurge(envContext, tbl); if (part == null) { throw new NoSuchObjectException("Partition doesn't exist. 
" + part_vals); } isArchived = MetaStoreUtils.isArchived(part); if (isArchived) { archiveParentDir = MetaStoreUtils.getOriginalLocation(part); verifyIsWritablePath(archiveParentDir); } if ((part.getSd() != null) && (part.getSd().getLocation() != null)) { partPath = new Path(part.getSd().getLocation()); verifyIsWritablePath(partPath); } if (!ms.dropPartition(catName, db_name, tbl_name, part_vals)) { throw new MetaException("Unable to drop partition"); } else { if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, part, true, deleteData, this), envContext); } needsCm = ReplChangeManager.shouldEnableCm(ms.getDatabase(catName, db_name), tbl); success = ms.commitTransaction(); } } finally { if (!success) { ms.rollbackTransaction(); } else if (deleteData && ((partPath != null) || (archiveParentDir != null))) { if (tableDataShouldBeDeleted) { if (mustPurge) { LOG.info("dropPartition() will purge " + partPath + " directly, skipping trash."); } else { LOG.info("dropPartition() will move " + partPath + " to trash-directory."); } // Archived partitions have har:/to_har_file as their location. // The original directory was saved in params if (isArchived) { assert (archiveParentDir != null); wh.deleteDir(archiveParentDir, true, mustPurge, needsCm); } else { assert (partPath != null); wh.deleteDir(partPath, true, mustPurge, needsCm); deleteParentRecursive(partPath.getParent(), part_vals.size() - 1, mustPurge, needsCm); } // ok even if the data is not deleted } } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, part, success, deleteData, this), envContext, transactionalListenerResponses, ms); } } return true; } private static boolean isMustPurge(EnvironmentContext envContext, Table tbl) { // Data needs deletion. Check if trash may be skipped. // Trash may be skipped iff: // 1. deleteData == true, obviously. // 2. tbl is external. // 3. Either // 3.1. User has specified PURGE from the commandline, and if not, // 3.2. User has set the table to auto-purge. 
return ((envContext != null) && Boolean.parseBoolean(envContext.getProperties().get("ifPurge"))) || MetaStoreUtils.isSkipTrash(tbl.getParameters()); } private void throwUnsupportedExceptionIfRemoteDB(String dbName, String operationName) throws MetaException { if (isDatabaseRemote(dbName)) { throw new MetaException("Operation " + operationName + " not supported for REMOTE database " + dbName); } } private boolean isDatabaseRemote(String name) { try { String[] parsedDbName = parseDbName(name, conf); Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); if (db != null && db.getType() != null && db.getType() == DatabaseType.REMOTE) { return true; } } catch (Exception e) { return false; } return false; } private void deleteParentRecursive(Path parent, int depth, boolean mustPurge, boolean needRecycle) throws IOException, MetaException { if (depth > 0 && parent != null && wh.isWritable(parent) && wh.isEmptyDir(parent)) { wh.deleteDir(parent, true, mustPurge, needRecycle); deleteParentRecursive(parent.getParent(), depth - 1, mustPurge, needRecycle); } } @Override public boolean drop_partition(final String db_name, final String tbl_name, final List<String> part_vals, final boolean deleteData) throws TException { return drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, null); } /** Stores a path and its depth, i.e. the number of directory levels it sits below the table root. */ private static class PathAndDepth implements Comparable<PathAndDepth> { final Path path; final int depth; public PathAndDepth(Path path, int depth) { this.path = path; this.depth = depth; } @Override public int hashCode() { return Objects.hash(path, depth); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PathAndDepth that = (PathAndDepth) o; return depth == that.depth && Objects.equals(path, that.path); } /** The largest {@code depth} is processed first in a {@link PriorityQueue}. */ @Override public int compareTo(PathAndDepth o) { return o.depth - depth; } } @Override public DropPartitionsResult drop_partitions_req( DropPartitionsRequest request) throws TException { RawStore ms = getMS(); String dbName = request.getDbName(), tblName = request.getTblName(); String catName = request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf); boolean ifExists = request.isSetIfExists() && request.isIfExists(); boolean deleteData = request.isSetDeleteData() && request.isDeleteData(); boolean ignoreProtection = request.isSetIgnoreProtection() && request.isIgnoreProtection(); boolean needResult = !request.isSetNeedResult() || request.isNeedResult(); List<PathAndDepth> dirsToDelete = new ArrayList<>(); List<Path> archToDelete = new ArrayList<>(); EnvironmentContext envContext = request.isSetEnvironmentContext() ? request.getEnvironmentContext() : null; boolean success = false; ms.openTransaction(); Table tbl = null; List<Partition> parts = null; boolean mustPurge = false; Map<String, String> transactionalListenerResponses = null; boolean needsCm = ReplChangeManager.shouldEnableCm(ms.getDatabase(catName, dbName), ms.getTable(catName, dbName, tblName)); try { // We need Partition-s for firing events and for result; DN needs MPartition-s to drop. // Great... Maybe we could bypass fetching MPartitions by issuing direct SQL deletes.
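// The request identifies the partitions to drop either as serialized filter
// expressions (RequestPartsSpec.exprs) or as explicit partition names
// (RequestPartsSpec.names); both cases are handled below. A client-side sketch with
// hypothetical values, assuming the Thrift-generated setters and required-fields
// constructor:
//   RequestPartsSpec spec = new RequestPartsSpec();
//   spec.setNames(Arrays.asList("ds=2021-01-01/hr=00"));
//   DropPartitionsRequest req = new DropPartitionsRequest(dbName, tblName, spec);
//   req.setDeleteData(true);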
tbl = get_table_core(catName, dbName, tblName); mustPurge = isMustPurge(envContext, tbl); int minCount = 0; RequestPartsSpec spec = request.getParts(); List<String> partNames = null; if (spec.isSetExprs()) { // Dropping by expressions. parts = new ArrayList<>(spec.getExprs().size()); for (DropPartitionsExpr expr : spec.getExprs()) { ++minCount; // At least one partition per expression, if not ifExists List<Partition> result = new ArrayList<>(); boolean hasUnknown = ms.getPartitionsByExpr( catName, dbName, tblName, expr.getExpr(), null, (short)-1, result); if (hasUnknown) { // Expr is built by DDLSA, it should only contain part cols and simple ops throw new MetaException("Unexpected unknown partitions to drop"); } // this is to prevent dropping an archived partition which is archived at a // different level than the drop command specified; check the partitions just // fetched for this expression. if (!ignoreProtection && expr.isSetPartArchiveLevel()) { for (Partition part : result) { if (MetaStoreUtils.isArchived(part) && MetaStoreUtils.getArchivingLevel(part) < expr.getPartArchiveLevel()) { throw new MetaException("Cannot drop a subset of partitions " + " in an archive, partition " + part); } } } parts.addAll(result); } } else if (spec.isSetNames()) { partNames = spec.getNames(); minCount = partNames.size(); parts = ms.getPartitionsByNames(catName, dbName, tblName, partNames); } else { throw new MetaException("Partition spec is not set"); } if ((parts.size() < minCount) && !ifExists) { throw new NoSuchObjectException("Some partitions to drop are missing"); } List<String> colNames = null; if (partNames == null) { partNames = new ArrayList<>(parts.size()); colNames = new ArrayList<>(tbl.getPartitionKeys().size()); for (FieldSchema col : tbl.getPartitionKeys()) { colNames.add(col.getName()); } } for (Partition part : parts) { // TODO - we need to speed this up for the normal path where all partitions are under // the table and we don't have to stat every partition firePreEvent(new PreDropPartitionEvent(tbl, part, deleteData, this)); if (colNames != null) { partNames.add(FileUtils.makePartName(colNames, part.getValues())); } // Preserve the old behavior of failing when we cannot write, even w/o deleteData, // and even if the table is external. That might not make any sense. if (MetaStoreUtils.isArchived(part)) { Path archiveParentDir = MetaStoreUtils.getOriginalLocation(part); verifyIsWritablePath(archiveParentDir); archToDelete.add(archiveParentDir); } if ((part.getSd() != null) && (part.getSd().getLocation() != null)) { Path partPath = new Path(part.getSd().getLocation()); verifyIsWritablePath(partPath); dirsToDelete.add(new PathAndDepth(partPath, part.getValues().size())); } } ms.dropPartitions(catName, dbName, tblName, partNames); if (parts != null && !parts.isEmpty() && !transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier .notifyEvent(transactionalListeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, parts, true, deleteData, this), envContext); } success = ms.commitTransaction(); DropPartitionsResult result = new DropPartitionsResult(); if (needResult) { result.setPartitions(parts); } return result; } finally { if (!success) { ms.rollbackTransaction(); } else if (checkTableDataShouldBeDeleted(tbl, deleteData)) { LOG.info( mustPurge ? "dropPartition() will purge partition-directories directly, skipping trash." : "dropPartition() will move partition-directories to trash-directory."); // Archived partitions have har:/to_har_file as their location.
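// (A har:/ path points into a Hadoop Archive and cannot be deleted directly.)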
// The original directory was saved in params for (Path path : archToDelete) { wh.deleteDir(path, true, mustPurge, needsCm); } // Uses a priority queue to delete the parents of deleted directories if they are empty. // Parents with the deepest paths are always processed first, which guarantees that the emptiness // of a parent cannot change once it has been processed, so duplicated processing is avoided. PriorityQueue<PathAndDepth> parentsToDelete = new PriorityQueue<>(); for (PathAndDepth p : dirsToDelete) { wh.deleteDir(p.path, true, mustPurge, needsCm); addParentForDel(parentsToDelete, p); } HashSet<PathAndDepth> processed = new HashSet<>(); while (!parentsToDelete.isEmpty()) { try { PathAndDepth p = parentsToDelete.poll(); if (processed.contains(p)) { continue; } processed.add(p); Path path = p.path; if (wh.isWritable(path) && wh.isEmptyDir(path)) { wh.deleteDir(path, true, mustPurge, needsCm); addParentForDel(parentsToDelete, p); } } catch (IOException ex) { LOG.warn("Error from recursive parent deletion", ex); throw new MetaException("Failed to delete parent: " + ex.getMessage()); } } } if (parts != null && !parts.isEmpty() && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_PARTITION, new DropPartitionEvent(tbl, parts, success, deleteData, this), envContext, transactionalListenerResponses, ms); } } } private static void addParentForDel(PriorityQueue<PathAndDepth> parentsToDelete, PathAndDepth p) { Path parent = p.path.getParent(); if (parent != null && p.depth - 1 > 0) { parentsToDelete.add(new PathAndDepth(parent, p.depth - 1)); } } private void verifyIsWritablePath(Path dir) throws MetaException { try { if (!wh.isWritable(dir.getParent())) { throw new MetaException("Table partition not deleted since " + dir.getParent() + " is not writable by " + SecurityUtils.getUser()); } } catch (IOException ex) { LOG.warn("Error from isWritable", ex); throw new MetaException("Table partition not deleted since " + dir.getParent() + " access cannot be checked: " + ex.getMessage()); } } @Override public boolean drop_partition_with_environment_context(final String db_name, final String tbl_name, final List<String> part_vals, final boolean deleteData, final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("drop_partition", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); LOG.info("Partition values:" + part_vals); boolean ret = false; Exception ex = null; try { ret = drop_partition_common(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, deleteData, envContext); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(IOException.class, MetaException.class) .rethrowException(e); } finally { endFunction("drop_partition", ret, ex, tbl_name); } return ret; } /** * Use {@link #get_partition_req(GetPartitionRequest)} instead.
* */ @Override @Deprecated public Partition get_partition(final String db_name, final String tbl_name, final List<String> part_vals) throws MetaException, NoSuchObjectException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partition", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); Partition ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().getPartition(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); ret = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, NoSuchObjectException.class).defaultMetaException(); } finally { endFunction("get_partition", ret != null, ex, tbl_name); } return ret; } @Override public GetPartitionResponse get_partition_req(GetPartitionRequest req) throws MetaException, NoSuchObjectException, TException { // TODO Move the logic from get_partition to here, as that method is getting deprecated String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); Partition p = get_partition(dbName, req.getTblName(), req.getPartVals()); GetPartitionResponse res = new GetPartitionResponse(); res.setPartition(p); return res; } /** * Fire a pre-event for read table operation, if there are any * pre-event listeners registered */ private void fireReadTablePreEvent(String catName, String dbName, String tblName) throws MetaException, NoSuchObjectException { if(preListeners.size() > 0) { Supplier<Table> tableSupplier = Suppliers.memoize(new Supplier<Table>() { @Override public Table get() { try { Table t = getMS().getTable(catName, dbName, tblName, null); if (t == null) { throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found"); } return t; } catch(MetaException | NoSuchObjectException e) { throw new RuntimeException(e); } } }); firePreEvent(new PreReadTableEvent(tableSupplier, this)); } } @Override @Deprecated public Partition get_partition_with_auth(final String db_name, final String tbl_name, final List<String> part_vals, final String user_name, final List<String> group_names) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partition_with_auth", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); Partition ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().getPartitionWithAuth(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, user_name, group_names); ret = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(InvalidObjectException.class, NoSuchObjectException.class) .rethrowException(e); } finally { endFunction("get_partition_with_auth", ret != null, ex, tbl_name); } return ret; } /** * Use {@link #get_partitions_req(PartitionsRequest)} ()} instead. 
* */ @Override @Deprecated public List<Partition> get_partitions(final String db_name, final String tbl_name, final short max_parts) throws NoSuchObjectException, MetaException { String[] parsedDbName = parseDbName(db_name, conf); startTableFunction("get_partitions", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<Partition> ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, NO_FILTER_STRING, max_parts); authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().getPartitions(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, max_parts); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_partitions", ret != null, ex, tbl_name); } return ret; } @Override public PartitionsResponse get_partitions_req(PartitionsRequest req) throws NoSuchObjectException, MetaException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<Partition> partitions = get_partitions(dbName, req.getTblName(), req.getMaxParts()); PartitionsResponse res = new PartitionsResponse(); res.setPartitions(partitions); return res; } @Override @Deprecated public List<Partition> get_partitions_with_auth(final String dbName, final String tblName, final short maxParts, final String userName, final List<String> groupNames) throws TException { String[] parsedDbName = parseDbName(dbName, conf); startTableFunction("get_partitions_with_auth", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); List<Partition> ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, NO_FILTER_STRING, maxParts); authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); ret = getMS().getPartitionsWithAuth(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, maxParts, userName, groupNames); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(InvalidObjectException.class, NoSuchObjectException.class) .rethrowException(e); } finally { endFunction("get_partitions_with_auth", ret != null, ex, tblName); } return ret; } private void checkLimitNumberOfPartitionsByFilter(String catName, String dbName, String tblName, String filterString, int maxParts) throws TException { if (isPartitionLimitEnabled()) { checkLimitNumberOfPartitions(tblName, get_num_partitions_by_filter(prependCatalogToDbName( catName, dbName, conf), tblName, filterString), maxParts); } } private void checkLimitNumberOfPartitionsByExpr(String catName, String dbName, String tblName, byte[] filterExpr, int maxParts) throws TException { if (isPartitionLimitEnabled()) { checkLimitNumberOfPartitions(tblName, get_num_partitions_by_expr(catName, dbName, tblName, filterExpr), maxParts); } } private boolean isPartitionLimitEnabled() { int partitionLimit = MetastoreConf.getIntVar(conf, ConfVars.LIMIT_PARTITION_REQUEST); return partitionLimit > -1; } private void checkLimitNumberOfPartitions(String tblName, int numPartitions, int maxToFetch) throws MetaException { if (isPartitionLimitEnabled()) { int partitionLimit = MetastoreConf.getIntVar(conf, ConfVars.LIMIT_PARTITION_REQUEST); int 
partitionRequest = (maxToFetch < 0) ? numPartitions : maxToFetch; if (partitionRequest > partitionLimit) { String configName = ConfVars.LIMIT_PARTITION_REQUEST.toString(); throw new MetaException(String.format(PARTITION_NUMBER_EXCEED_LIMIT_MSG, partitionRequest, tblName, partitionLimit, configName)); } } } @Override @Deprecated public List<PartitionSpec> get_partitions_pspec(final String db_name, final String tbl_name, final int max_parts) throws NoSuchObjectException, MetaException { String[] parsedDbName = parseDbName(db_name, conf); String tableName = tbl_name.toLowerCase(); startTableFunction("get_partitions_pspec", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); List<PartitionSpec> partitionSpecs = null; try { Table table = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); // get_partitions will parse out the catalog and db names itself List<Partition> partitions = get_partitions(db_name, tableName, (short) max_parts); if (is_partition_spec_grouping_enabled(table)) { partitionSpecs = MetaStoreServerUtils .getPartitionspecsGroupedByStorageDescriptor(table, partitions); } else { PartitionSpec pSpec = new PartitionSpec(); pSpec.setPartitionList(new PartitionListComposingSpec(partitions)); pSpec.setCatName(parsedDbName[CAT_NAME]); pSpec.setDbName(parsedDbName[DB_NAME]); pSpec.setTableName(tableName); pSpec.setRootPath(table.getSd().getLocation()); partitionSpecs = Arrays.asList(pSpec); } return partitionSpecs; } finally { endFunction("get_partitions_pspec", partitionSpecs != null && !partitionSpecs.isEmpty(), null, tbl_name); } } @Override public GetPartitionsResponse get_partitions_with_specs(GetPartitionsRequest request) throws MetaException, TException { String catName = null; if (request.isSetCatName()) { catName = request.getCatName(); } String[] parsedDbName = parseDbName(request.getDbName(), conf); String tableName = request.getTblName(); if (catName == null) { // if catName is not provided in the request use the catName parsed from the dbName catName = parsedDbName[CAT_NAME]; } startTableFunction("get_partitions_with_specs", catName, parsedDbName[DB_NAME], tableName); GetPartitionsResponse response = null; Exception ex = null; try { Table table = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); List<Partition> partitions = getMS() .getPartitionSpecsByFilterAndProjection(table, request.getProjectionSpec(), request.getFilterSpec()); List<String> processorCapabilities = request.getProcessorCapabilities(); String processorId = request.getProcessorIdentifier(); if (processorCapabilities == null || processorCapabilities.size() == 0 || processorCapabilities.contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + processorId); } else { if (transformer != null) { partitions = transformer.transformPartitions(partitions, table, processorCapabilities, processorId); } } List<PartitionSpec> partitionSpecs = MetaStoreServerUtils.getPartitionspecsGroupedByStorageDescriptor(table, partitions); response = new GetPartitionsResponse(); response.setPartitionSpec(partitionSpecs); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_with_specs", response != null, ex, tableName); } return response; } private static boolean is_partition_spec_grouping_enabled(Table table) { Map<String, String> parameters = table.getParameters(); return parameters.containsKey("hive.hcatalog.partition.spec.grouping.enabled") && 
parameters.get("hive.hcatalog.partition.spec.grouping.enabled").equalsIgnoreCase("true"); } @Override @Deprecated public List<String> get_partition_names(final String db_name, final String tbl_name, final short max_parts) throws NoSuchObjectException, MetaException { String[] parsedDbName = parseDbName(db_name, conf); startTableFunction("get_partition_names", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<String> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().listPartitionNames(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, max_parts); ret = FilterUtils.filterPartitionNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_partition_names", ret != null, ex, tbl_name); } return ret; } @Override public PartitionValuesResponse get_partition_values(PartitionValuesRequest request) throws MetaException { String catName = request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf); String dbName = request.getDbName(); String tblName = request.getTblName(); try { authorizeTableForPartitionMetadata(catName, dbName, tblName); // This is serious black magic, as the following 2 lines do nothing AFAICT but without them // the subsequent call to listPartitionValues fails. List<FieldSchema> partCols = new ArrayList<FieldSchema>(); partCols.add(request.getPartitionKeys().get(0)); return getMS().listPartitionValues(catName, dbName, tblName, request.getPartitionKeys(), request.isApplyDistinct(), request.getFilter(), request.isAscending(), request.getPartitionOrder(), request.getMaxParts()); } catch (NoSuchObjectException e) { LOG.error(String.format("Unable to get partition for %s.%s.%s", catName, dbName, tblName), e); throw new MetaException(e.getMessage()); } } @Deprecated @Override public void alter_partition(final String db_name, final String tbl_name, final Partition new_part) throws TException { rename_partition(db_name, tbl_name, null, new_part); } @Deprecated @Override public void alter_partition_with_environment_context(final String dbName, final String tableName, final Partition newPartition, final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(dbName, conf); // TODO: this method name is confusing, it actually does full alter (sortof) rename_partition(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, null, newPartition, envContext, null); } @Deprecated @Override public void rename_partition(final String db_name, final String tbl_name, final List<String> part_vals, final Partition new_part) throws TException { // Call rename_partition without an environment context. 
String[] parsedDbName = parseDbName(db_name, conf); rename_partition(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, new_part, null, null); } @Override public RenamePartitionResponse rename_partition_req( RenamePartitionRequest req) throws InvalidOperationException ,MetaException ,TException { rename_partition(req.getCatName(), req.getDbName(), req.getTableName(), req.getPartVals(), req.getNewPart(), null, req.getValidWriteIdList()); return new RenamePartitionResponse(); }; private void rename_partition(String catName, String db_name, String tbl_name, List<String> part_vals, Partition new_part, EnvironmentContext envContext, String validWriteIds) throws TException { startTableFunction("alter_partition", catName, db_name, tbl_name); if (LOG.isInfoEnabled()) { LOG.info("New partition values:" + new_part.getValues()); if (part_vals != null && part_vals.size() > 0) { LOG.info("Old Partition values:" + part_vals); } } // Adds the missing scheme/authority for the new partition location if (new_part.getSd() != null) { String newLocation = new_part.getSd().getLocation(); if (org.apache.commons.lang3.StringUtils.isNotEmpty(newLocation)) { Path tblPath = wh.getDnsPath(new Path(newLocation)); new_part.getSd().setLocation(tblPath.toString()); } } // Make sure the new partition has the catalog value set if (!new_part.isSetCatName()) { new_part.setCatName(catName); } Partition oldPart = null; Exception ex = null; try { Table table = null; table = getMS().getTable(catName, db_name, tbl_name, null); firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, part_vals, new_part, this)); if (part_vals != null && !part_vals.isEmpty()) { MetaStoreServerUtils.validatePartitionNameCharacters(new_part.getValues(), partitionValidationPattern); } oldPart = alterHandler.alterPartition(getMS(), wh, catName, db_name, tbl_name, part_vals, new_part, envContext, this, validWriteIds); // Only fetch the table if we actually have a listener if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(oldPart, new_part, table, false, true, new_part.getWriteId(), this), envContext); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidOperationException.class) .convertIfInstance(InvalidObjectException.class, InvalidOperationException.class) .convertIfInstance(AlreadyExistsException.class, InvalidOperationException.class) .defaultMetaException(); } finally { endFunction("alter_partition", oldPart != null, ex, tbl_name); } } @Override public void alter_partitions(final String db_name, final String tbl_name, final List<Partition> new_parts) throws TException { String[] o = parseDbName(db_name, conf); alter_partitions_with_environment_context(o[0], o[1], tbl_name, new_parts, null, null, -1); } @Override public AlterPartitionsResponse alter_partitions_req(AlterPartitionsRequest req) throws TException { alter_partitions_with_environment_context(req.getCatName(), req.getDbName(), req.getTableName(), req.getPartitions(), req.getEnvironmentContext(), req.isSetValidWriteIdList() ? req.getValidWriteIdList() : null, req.isSetWriteId() ? req.getWriteId() : -1); return new AlterPartitionsResponse(); } // The old API we are keeping for backward compat. Not used within Hive. 
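// Callers inside Hive go through alter_partitions_req above, which unpacks the
// catalog name, environment context, valid write-id list and write id from the
// Thrift request before delegating to the private overload below.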
@Deprecated @Override public void alter_partitions_with_environment_context(final String db_name, final String tbl_name, final List<Partition> new_parts, EnvironmentContext environmentContext) throws TException { String[] o = parseDbName(db_name, conf); alter_partitions_with_environment_context(o[0], o[1], tbl_name, new_parts, environmentContext, null, -1); } private void alter_partitions_with_environment_context(String catName, String db_name, final String tbl_name, final List<Partition> new_parts, EnvironmentContext environmentContext, String writeIdList, long writeId) throws TException { if (environmentContext == null) { environmentContext = new EnvironmentContext(); } if (catName == null) { catName = MetaStoreUtils.getDefaultCatalog(conf); } startTableFunction("alter_partitions", catName, db_name, tbl_name); if (LOG.isInfoEnabled()) { for (Partition tmpPart : new_parts) { LOG.info("New partition values:" + tmpPart.getValues()); } } // all partitions are altered atomically // all prehooks are fired together followed by all post hooks List<Partition> oldParts = null; Exception ex = null; Lock tableLock = getTableLockFor(db_name, tbl_name); tableLock.lock(); try { Table table = getMS().getTable(catName, db_name, tbl_name, null); for (Partition tmpPart : new_parts) { // Make sure the catalog name is set in the new partition if (!tmpPart.isSetCatName()) { tmpPart.setCatName(getDefaultCatalog(conf)); } firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, null, tmpPart, this)); } oldParts = alterHandler.alterPartitions(getMS(), wh, catName, db_name, tbl_name, new_parts, environmentContext, writeIdList, writeId, this); Iterator<Partition> olditr = oldParts.iterator(); for (Partition tmpPart : new_parts) { Partition oldTmpPart; if (olditr.hasNext()) { oldTmpPart = olditr.next(); } else { throw new InvalidOperationException("failed to alter partitions"); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_PARTITION, new AlterPartitionEvent(oldTmpPart, tmpPart, table, false, true, writeId, this)); } } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidOperationException.class) .convertIfInstance(InvalidObjectException.class, InvalidOperationException.class) .convertIfInstance(AlreadyExistsException.class, InvalidOperationException.class) .defaultMetaException(); } finally { tableLock.unlock(); endFunction("alter_partitions", oldParts != null, ex, tbl_name); } } @Override public String getVersion() throws TException { String version = MetastoreVersionInfo.getVersion(); endFunction(startFunction("getVersion"), version != null, null); return version; } @Override public void alter_table(final String dbname, final String name, final Table newTable) throws InvalidOperationException, MetaException { // Do not set an environment context.
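// Equivalent to alter_table_with_environment_context with a null context; all of
// the alter_table variants funnel into alter_table_core.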
String[] parsedDbName = parseDbName(dbname, conf); alter_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name, newTable, null, null, null, null); } @Override public void alter_table_with_cascade(final String dbname, final String name, final Table newTable, final boolean cascade) throws InvalidOperationException, MetaException { EnvironmentContext envContext = null; if (cascade) { envContext = new EnvironmentContext(); envContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE); } String[] parsedDbName = parseDbName(dbname, conf); alter_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name, newTable, envContext, null, null, null); } @Override public AlterTableResponse alter_table_req(AlterTableRequest req) throws InvalidOperationException, MetaException, TException { alter_table_core(req.getCatName(), req.getDbName(), req.getTableName(), req.getTable(), req.getEnvironmentContext(), req.getValidWriteIdList(), req.getProcessorCapabilities(), req.getProcessorIdentifier()); return new AlterTableResponse(); } @Override public void alter_table_with_environment_context(final String dbname, final String name, final Table newTable, final EnvironmentContext envContext) throws InvalidOperationException, MetaException { String[] parsedDbName = parseDbName(dbname, conf); alter_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], name, newTable, envContext, null, null, null); } private void alter_table_core(String catName, String dbname, String name, Table newTable, EnvironmentContext envContext, String validWriteIdList, List<String> processorCapabilities, String processorId) throws InvalidOperationException, MetaException { startFunction("alter_table", ": " + TableName.getQualified(catName, dbname, name) + " newtbl=" + newTable.getTableName()); if (envContext == null) { envContext = new EnvironmentContext(); } if (catName == null) { catName = MetaStoreUtils.getDefaultCatalog(conf); } // HIVE-25282: Drop/Alter table in REMOTE db should fail try { Database db = get_database_core(catName, dbname); if (db != null && db.getType().equals(DatabaseType.REMOTE)) { throw new MetaException("Alter table in REMOTE database " + db.getName() + " is not allowed"); } } catch (NoSuchObjectException e) { throw new InvalidOperationException("Alter table in REMOTE database is not allowed"); } // Update the time if it hasn't been specified. 
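// DDL_TIME is stored in seconds, hence the division of currentTimeMillis() by 1000
// below.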
if (newTable.getParameters() == null || newTable.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) { newTable.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); } // Adds the missing scheme/authority for the new table location if (newTable.getSd() != null) { String newLocation = newTable.getSd().getLocation(); if (org.apache.commons.lang3.StringUtils.isNotEmpty(newLocation)) { Path tblPath = wh.getDnsPath(new Path(newLocation)); newTable.getSd().setLocation(tblPath.toString()); } } // Set the catalog name if it hasn't been set in the new table if (!newTable.isSetCatName()) { newTable.setCatName(catName); } boolean success = false; Exception ex = null; try { GetTableRequest request = new GetTableRequest(dbname, name); request.setCatName(catName); Table oldt = get_table_core(request); if (transformer != null) { newTable = transformer.transformAlterTable(oldt, newTable, processorCapabilities, processorId); } firePreEvent(new PreAlterTableEvent(oldt, newTable, this)); alterHandler.alterTable(getMS(), wh, catName, dbname, name, newTable, envContext, this, validWriteIdList); success = true; } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(MetaException.class, InvalidOperationException.class) .convertIfInstance(NoSuchObjectException.class, InvalidOperationException.class) .defaultMetaException(); } finally { endFunction("alter_table", success, ex, name); } } @Override public List<String> get_tables(final String dbname, final String pattern) throws MetaException { startFunction("get_tables", ": db=" + dbname + " pat=" + pattern); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { if (isDatabaseRemote(dbname)) { Database db = get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); return DataConnectorProviderFactory.getDataConnectorProvider(db).getTableNames(); } } catch (Exception e) { /* appears we return empty set instead of throwing an exception */ } try { ret = getMS().getTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern); if(ret != null && !ret.isEmpty()) { List<Table> tableInfo = new ArrayList<>(); tableInfo = getMS().getTableObjectsByName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); tableInfo = FilterUtils.filterTablesIfEnabled(isServerFilterEnabled, filterHook, tableInfo);// tableInfo object has the owner information of the table which is being passed to FilterUtils. 
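// Rebuild the name list from the filtered table objects so that tables the caller
// is not authorized to see are dropped from the result.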
ret = new ArrayList<>(); for (Table tbl : tableInfo) { ret.add(tbl.getTableName()); } } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_tables", ret != null, ex); } return ret; } @Override public List<String> get_tables_by_type(final String dbname, final String pattern, final String tableType) throws MetaException { startFunction("get_tables_by_type", ": db=" + dbname + " pat=" + pattern + ",type=" + tableType); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { ret = getTablesByTypeCore(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern, tableType); ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_tables_by_type", ret != null, ex); } return ret; } private List<String> getTablesByTypeCore(final String catName, final String dbname, final String pattern, final String tableType) throws MetaException { startFunction("getTablesByTypeCore", ": catName=" + catName + ": db=" + dbname + " pat=" + pattern + ",type=" + tableType); List<String> ret = null; Exception ex = null; Database db = null; try { db = get_database_core(catName, dbname); if (db != null) { if (db.getType().equals(DatabaseType.REMOTE)) { return DataConnectorProviderFactory.getDataConnectorProvider(db).getTableNames(); } } } catch (Exception e) { /* ignore */ } try { ret = getMS().getTables(catName, dbname, pattern, TableType.valueOf(tableType), -1); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("getTablesByTypeCore", ret != null, ex); } return ret; } @Override public List<Table> get_all_materialized_view_objects_for_rewriting() throws MetaException { startFunction("get_all_materialized_view_objects_for_rewriting"); List<Table> ret = null; Exception ex = null; try { ret = getMS().getAllMaterializedViewObjectsForRewriting(DEFAULT_CATALOG_NAME); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_materialized_view_objects_for_rewriting", ret != null, ex); } return ret; } @Override public List<String> get_materialized_views_for_rewriting(final String dbname) throws MetaException { startFunction("get_materialized_views_for_rewriting", ": db=" + dbname); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { ret = getMS().getMaterializedViewsForRewriting(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_materialized_views_for_rewriting", ret != null, ex); } return ret; } @Override public List<String> get_all_tables(final String dbname) throws MetaException { startFunction("get_all_tables", ": db=" + dbname); List<String> ret = null; Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { ret = getMS().getAllTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME]); ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_tables", ret != null, ex); } return ret; } /** * Use {@link #get_fields_req(GetFieldsRequest)} ()} instead. 
* */ @Override @Deprecated public List<FieldSchema> get_fields(String db, String tableName) throws MetaException, UnknownTableException, UnknownDBException { return get_fields_with_environment_context(db, tableName, null); } @Override @Deprecated public List<FieldSchema> get_fields_with_environment_context(String db, String tableName, final EnvironmentContext envContext) throws MetaException, UnknownTableException, UnknownDBException { startFunction("get_fields_with_environment_context", ": db=" + db + "tbl=" + tableName); String[] names = tableName.split("\\."); String base_table_name = names[0]; String[] parsedDbName = parseDbName(db, conf); Table tbl; List<FieldSchema> ret = null; Exception ex = null; try { try { tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); firePreEvent(new PreReadTableEvent(tbl, this)); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage()); } if (null == tbl.getSd().getSerdeInfo().getSerializationLib() || MetastoreConf.getStringCollection(conf, ConfVars.SERDES_USING_METASTORE_FOR_SCHEMA).contains( tbl.getSd().getSerdeInfo().getSerializationLib())) { ret = tbl.getSd().getCols(); } else { StorageSchemaReader schemaReader = getStorageSchemaReader(); ret = schemaReader.readSchema(tbl, envContext, getConf()); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(UnknownTableException.class, MetaException.class).defaultMetaException(); } finally { endFunction("get_fields_with_environment_context", ret != null, ex, tableName); } return ret; } @Override public GetFieldsResponse get_fields_req(GetFieldsRequest req) throws MetaException, UnknownTableException, UnknownDBException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<FieldSchema> fields = get_fields_with_environment_context( dbName, req.getTblName(), req.getEnvContext()); GetFieldsResponse res = new GetFieldsResponse(); res.setFields(fields); return res; } private StorageSchemaReader getStorageSchemaReader() throws MetaException { if (storageSchemaReader == null) { String className = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.STORAGE_SCHEMA_READER_IMPL); Class<? extends StorageSchemaReader> readerClass = JavaUtils.getClass(className, StorageSchemaReader.class); try { storageSchemaReader = readerClass.newInstance(); } catch (InstantiationException|IllegalAccessException e) { LOG.error("Unable to instantiate class " + className, e); throw new MetaException(e.getMessage()); } } return storageSchemaReader; } /** * Use {@link #get_schema_req(GetSchemaRequest)} ()} instead. * */ @Override @Deprecated public List<FieldSchema> get_schema(String db, String tableName) throws MetaException, UnknownTableException, UnknownDBException { return get_schema_with_environment_context(db,tableName, null); } /** * Return the schema of the table. This function includes partition columns * in addition to the regular columns. 
* * @param db * Name of the database * @param tableName * Name of the table * @param envContext * Store session based properties * @return List of columns, each column is a FieldSchema structure * @throws MetaException * @throws UnknownTableException * @throws UnknownDBException */ @Override @Deprecated public List<FieldSchema> get_schema_with_environment_context(String db, String tableName, final EnvironmentContext envContext) throws MetaException, UnknownTableException, UnknownDBException { startFunction("get_schema_with_environment_context", ": db=" + db + "tbl=" + tableName); boolean success = false; Exception ex = null; try { String[] names = tableName.split("\\."); String base_table_name = names[0]; String[] parsedDbName = parseDbName(db, conf); Table tbl; try { tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage()); } // Pass unparsed db name here List<FieldSchema> fieldSchemas = get_fields_with_environment_context(db, base_table_name, envContext); if (tbl == null || fieldSchemas == null) { throw new UnknownTableException(tableName + " doesn't exist"); } if (tbl.getPartitionKeys() != null) { // Combine the column field schemas and the partition keys to create the // whole schema fieldSchemas.addAll(tbl.getPartitionKeys()); } success = true; return fieldSchemas; } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(UnknownDBException.class, UnknownTableException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("get_schema_with_environment_context", success, ex, tableName); } } @Override public GetSchemaResponse get_schema_req(GetSchemaRequest req) throws MetaException, UnknownTableException, UnknownDBException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<FieldSchema> fields = get_schema_with_environment_context( dbName, req.getTblName(), req.getEnvContext()); GetSchemaResponse res = new GetSchemaResponse(); res.setFields(fields); return res; } @Override public String getCpuProfile(int profileDurationInSec) throws TException { return ""; } /** * Returns the value of the given configuration variable name. If the * configuration variable with the given name doesn't exist, or if there * were an exception thrown while retrieving the variable, or if name is * null, defaultValue is returned. */ @Override public String get_config_value(String name, String defaultValue) throws TException { startFunction("get_config_value", ": name=" + name + " defaultValue=" + defaultValue); boolean success = false; Exception ex = null; try { if (name == null) { success = true; return defaultValue; } // Allow only keys that start with hive.*, hdfs.*, mapred.* for security // i.e. 
don't allow access to db password if (!Pattern.matches("(hive|hdfs|mapred|metastore).*", name)) { throw new ConfigValSecurityException("For security reasons, the " + "config key " + name + " cannot be accessed"); } String toReturn = defaultValue; try { toReturn = MetastoreConf.get(conf, name); if (toReturn == null) { toReturn = defaultValue; } } catch (RuntimeException e) { LOG.error(threadLocalId.get().toString() + ": " + "RuntimeException thrown in get_config_value - msg: " + e.getMessage() + " cause: " + e.getCause()); } success = true; return toReturn; } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(TException.class).defaultMetaException(); } finally { endFunction("get_config_value", success, ex); } } public static List<String> getPartValsFromName(Table t, String partName) throws MetaException, InvalidObjectException { Preconditions.checkArgument(t != null, "Table can not be null"); // Unescape the partition name LinkedHashMap<String, String> hm = Warehouse.makeSpecFromName(partName); List<String> partVals = new ArrayList<>(); for (FieldSchema field : t.getPartitionKeys()) { String key = field.getName(); String val = hm.get(key); if (val == null) { throw new InvalidObjectException("incomplete partition name - missing " + key); } partVals.add(val); } return partVals; } private List<String> getPartValsFromName(RawStore ms, String catName, String dbName, String tblName, String partName) throws MetaException, InvalidObjectException { Table t = ms.getTable(catName, dbName, tblName, null); if (t == null) { throw new InvalidObjectException(dbName + "." + tblName + " table not found"); } return getPartValsFromName(t, partName); } private Partition get_partition_by_name_core(final RawStore ms, final String catName, final String db_name, final String tbl_name, final String part_name) throws TException { fireReadTablePreEvent(catName, db_name, tbl_name); List<String> partVals; try { partVals = getPartValsFromName(ms, catName, db_name, tbl_name, part_name); } catch (InvalidObjectException e) { throw new NoSuchObjectException(e.getMessage()); } Partition p = ms.getPartition(catName, db_name, tbl_name, partVals); p = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, p); if (p == null) { throw new NoSuchObjectException(TableName.getQualified(catName, db_name, tbl_name) + " partition (" + part_name + ") not found"); } return p; } @Override @Deprecated public Partition get_partition_by_name(final String db_name, final String tbl_name, final String part_name) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("get_partition_by_name", ": tbl=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); Partition ret = null; Exception ex = null; try { ret = get_partition_by_name_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_name); ret = FilterUtils.filterPartitionIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partition_by_name", ret != null, ex, tbl_name); } return ret; } @Override public Partition append_partition_by_name(final String db_name, final String tbl_name, final String part_name) throws TException { return append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, null); } @Override public Partition append_partition_by_name_with_environment_context(final String db_name, final String tbl_name, final String part_name, final 
EnvironmentContext env_context) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("append_partition_by_name", ": tbl=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); Partition ret = null; Exception ex = null; try { RawStore ms = getMS(); List<String> partVals = getPartValsFromName(ms, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_name); ret = append_partition_common(ms, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partVals, env_context); } catch (Exception e) { ex = e; throw handleException(e) .throwIfInstance(InvalidObjectException.class, AlreadyExistsException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("append_partition_by_name", ret != null, ex, tbl_name); } return ret; } private boolean drop_partition_by_name_core(final RawStore ms, final String catName, final String db_name, final String tbl_name, final String part_name, final boolean deleteData, final EnvironmentContext envContext) throws TException, IOException { List<String> partVals; try { partVals = getPartValsFromName(ms, catName, db_name, tbl_name, part_name); } catch (InvalidObjectException e) { throw new NoSuchObjectException(e.getMessage()); } return drop_partition_common(ms, catName, db_name, tbl_name, partVals, deleteData, envContext); } @Override public boolean drop_partition_by_name(final String db_name, final String tbl_name, final String part_name, final boolean deleteData) throws TException { return drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, null); } @Override public boolean drop_partition_by_name_with_environment_context(final String db_name, final String tbl_name, final String part_name, final boolean deleteData, final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("drop_partition_by_name", ": tbl=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); boolean ret = false; Exception ex = null; try { ret = drop_partition_by_name_core(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_name, deleteData, envContext); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(IOException.class, MetaException.class).rethrowException(e); } finally { endFunction("drop_partition_by_name", ret, ex, tbl_name); } return ret; } @Override @Deprecated public List<Partition> get_partitions_ps(final String db_name, final String tbl_name, final List<String> part_vals, final short max_parts) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partitions_ps", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); List<Partition> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); // Don't send the parsedDbName, as this method will parse itself. ret = get_partitions_ps_with_auth(db_name, tbl_name, part_vals, max_parts, null, null); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_ps", ret != null, ex, tbl_name); } return ret; } /** * Use {@link #get_partitions_ps_with_auth_req(GetPartitionsPsWithAuthRequest)} ()} instead. 
* */ @Override @Deprecated public List<Partition> get_partitions_ps_with_auth(final String db_name, final String tbl_name, final List<String> part_vals, final short max_parts, final String userName, final List<String> groupNames) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partitions_ps_with_auth", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<Partition> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().listPartitionsPsWithAuth(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, max_parts, userName, groupNames); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); } catch (Exception e) { ex = e; handleException(e).convertIfInstance(InvalidObjectException.class, MetaException.class).rethrowException(e); } finally { endFunction("get_partitions_ps_with_auth", ret != null, ex, tbl_name); } return ret; } @Override public GetPartitionsPsWithAuthResponse get_partitions_ps_with_auth_req(GetPartitionsPsWithAuthRequest req) throws MetaException, NoSuchObjectException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<Partition> partitions = null; if (req.getPartVals() == null) { partitions = get_partitions_with_auth(dbName, req.getTblName(), req.getMaxParts(), req.getUserName(), req.getGroupNames()); } else { partitions = get_partitions_ps_with_auth(dbName, req.getTblName(), req.getPartVals(), req.getMaxParts(), req.getUserName(), req.getGroupNames()); } GetPartitionsPsWithAuthResponse res = new GetPartitionsPsWithAuthResponse(); res.setPartitions(partitions); return res; } /** * Use {@link #get_partition_names_ps_req(GetPartitionNamesPsRequest)} ()} instead. 
* */ @Override @Deprecated public List<String> get_partition_names_ps(final String db_name, final String tbl_name, final List<String> part_vals, final short max_parts) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("get_partitions_names_ps", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals); fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); List<String> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name); ret = getMS().listPartitionNamesPs(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, part_vals, max_parts); ret = FilterUtils.filterPartitionNamesIfEnabled(isServerFilterEnabled, filterHook, parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_names_ps", ret != null, ex, tbl_name); } return ret; } @Override public GetPartitionNamesPsResponse get_partition_names_ps_req(GetPartitionNamesPsRequest req) throws MetaException, NoSuchObjectException, TException { String dbName = MetaStoreUtils.prependCatalogToDbName(req.getCatName(), req.getDbName(), conf); List<String> names = get_partition_names_ps(dbName, req.getTblName(), req.getPartValues(), req.getMaxParts()); GetPartitionNamesPsResponse res = new GetPartitionNamesPsResponse(); res.setNames(names); return res; } @Override public List<String> get_partition_names_req(PartitionsByExprRequest req) throws MetaException, NoSuchObjectException, TException { String catName = req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf); String dbName = req.getDbName(), tblName = req.getTblName(); startTableFunction("get_partition_names_req", catName, dbName, tblName); fireReadTablePreEvent(catName, dbName, tblName); List<String> ret = null; Exception ex = null; try { authorizeTableForPartitionMetadata(catName, dbName, tblName); ret = getMS().listPartitionNames(catName, dbName, tblName, req.getDefaultPartitionName(), req.getExpr(), req.getOrder(), req.getMaxParts()); ret = FilterUtils.filterPartitionNamesIfEnabled(isServerFilterEnabled, filterHook, catName, dbName, tblName, ret); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partition_names_req", ret != null, ex, tblName); } return ret; } @Override public List<String> partition_name_to_vals(String part_name) throws TException { if (part_name.length() == 0) { return Collections.emptyList(); } LinkedHashMap<String, String> map = Warehouse.makeSpecFromName(part_name); return new ArrayList<>(map.values()); } @Override public Map<String, String> partition_name_to_spec(String part_name) throws TException { if (part_name.length() == 0) { return new HashMap<>(); } return Warehouse.makeSpecFromName(part_name); } public static String lowerCaseConvertPartName(String partName) throws MetaException { if (partName == null) { return partName; } boolean isFirst = true; Map<String, String> partSpec = Warehouse.makeEscSpecFromName(partName); String convertedPartName = new String(); for (Map.Entry<String, String> entry : partSpec.entrySet()) { String partColName = entry.getKey(); String partColVal = entry.getValue(); if (!isFirst) { convertedPartName += "/"; } else { isFirst = false; } convertedPartName += partColName.toLowerCase() + "=" + partColVal; } return convertedPartName; } @Override @Deprecated public ColumnStatistics get_table_column_statistics(String dbName, String tableName, String 
colName) throws TException {
    String[] parsedDbName = parseDbName(dbName, conf);
    parsedDbName[CAT_NAME] = parsedDbName[CAT_NAME].toLowerCase();
    parsedDbName[DB_NAME] = parsedDbName[DB_NAME].toLowerCase();
    tableName = tableName.toLowerCase();
    colName = colName.toLowerCase();
    startFunction("get_column_statistics_by_table", ": table="
        + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName)
        + " column=" + colName);
    ColumnStatistics statsObj = null;
    try {
      statsObj = getMS().getTableColumnStatistics(
          parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName,
          Lists.newArrayList(colName), "hive", null);
      if (statsObj != null) {
        assert statsObj.getStatsObjSize() <= 1;
      }
      return statsObj;
    } finally {
      endFunction("get_column_statistics_by_table", statsObj != null, null, tableName);
    }
  }

  @Override
  public TableStatsResult get_table_statistics_req(TableStatsRequest request) throws TException {
    String catName = request.isSetCatName() ?
        request.getCatName().toLowerCase() : getDefaultCatalog(conf);
    String dbName = request.getDbName().toLowerCase();
    String tblName = request.getTblName().toLowerCase();
    startFunction("get_table_statistics_req", ": table="
        + TableName.getQualified(catName, dbName, tblName));
    TableStatsResult result = null;
    List<String> lowerCaseColNames = new ArrayList<>(request.getColNames().size());
    for (String colName : request.getColNames()) {
      lowerCaseColNames.add(colName.toLowerCase());
    }
    try {
      ColumnStatistics cs = getMS().getTableColumnStatistics(
          catName, dbName, tblName, lowerCaseColNames,
          request.getEngine(), request.getValidWriteIdList());
      // Note: stats compliance is not propagated to the client; instead, we just return nothing
      // if stats are not compliant for now. This won't work for stats merging, but that
      // is currently only done on the metastore side (see set_aggr...).
      // For some optimizations we might make use of incorrect stats that are "better than
      // nothing", so this may change in future.
      result = new TableStatsResult((cs == null || cs.getStatsObj() == null
          || (cs.isSetIsStatsCompliant() && !cs.isIsStatsCompliant()))
          ? Lists.newArrayList() : cs.getStatsObj());
    } finally {
      endFunction("get_table_statistics_req", result != null, null, tblName);
    }
    return result;
  }

  @Override
  @Deprecated
  public ColumnStatistics get_partition_column_statistics(String dbName, String tableName,
      String partName, String colName) throws TException {
    // Note: this method appears to be unused within Hive.
    // It doesn't take txn stats into account.
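    // Illustrative example (hypothetical values): the normalization below lower-cases
    // identifiers only, so a partition name such as "DS=2021-01-01/Country=US" becomes
    // "ds=2021-01-01/Country=US" via lowerCaseConvertPartName -- partition column names
    // are folded to lower case while the partition values are preserved as given.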
dbName = dbName.toLowerCase();
    String[] parsedDbName = parseDbName(dbName, conf);
    tableName = tableName.toLowerCase();
    colName = colName.toLowerCase();
    String convertedPartName = lowerCaseConvertPartName(partName);
    startFunction("get_column_statistics_by_partition", ": table="
        + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName)
        + " partition=" + convertedPartName + " column=" + colName);
    ColumnStatistics statsObj = null;
    try {
      List<ColumnStatistics> list = getMS().getPartitionColumnStatistics(
          parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName,
          Lists.newArrayList(convertedPartName), Lists.newArrayList(colName), "hive");
      if (list.isEmpty()) {
        return null;
      }
      if (list.size() != 1) {
        throw new MetaException("Expected one statistics object for a single column"
            + " and partition, but got " + list.size());
      }
      statsObj = list.get(0);
    } finally {
      endFunction("get_column_statistics_by_partition", statsObj != null, null, tableName);
    }
    return statsObj;
  }

  @Override
  public PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request)
      throws TException {
    String catName = request.isSetCatName() ?
        request.getCatName().toLowerCase() : getDefaultCatalog(conf);
    String dbName = request.getDbName().toLowerCase();
    String tblName = request.getTblName().toLowerCase();
    startFunction("get_partitions_statistics_req", ": table="
        + TableName.getQualified(catName, dbName, tblName));
    PartitionsStatsResult result = null;
    List<String> lowerCaseColNames = new ArrayList<>(request.getColNames().size());
    for (String colName : request.getColNames()) {
      lowerCaseColNames.add(colName.toLowerCase());
    }
    List<String> lowerCasePartNames = new ArrayList<>(request.getPartNames().size());
    for (String partName : request.getPartNames()) {
      lowerCasePartNames.add(lowerCaseConvertPartName(partName));
    }
    try {
      List<ColumnStatistics> stats = getMS().getPartitionColumnStatistics(
          catName, dbName, tblName, lowerCasePartNames, lowerCaseColNames,
          request.getEngine(), request.isSetValidWriteIdList() ? request.getValidWriteIdList() : null);
      Map<String, List<ColumnStatisticsObj>> map = new HashMap<>();
      if (stats != null) {
        for (ColumnStatistics stat : stats) {
          // Note: stats compliance is not propagated to the client; instead, we just return nothing
          // if stats are not compliant for now. This won't work for stats merging, but that
          // is currently only done on the metastore side (see set_aggr...).
          // For some optimizations we might make use of incorrect stats that are "better than
          // nothing", so this may change in future.
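          // The check below drops a stats object only when the compliance flag is
          // present and explicitly false; an unset flag is treated as usable stats.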
if (stat.isSetIsStatsCompliant() && !stat.isIsStatsCompliant()) {
            continue;
          }
          map.put(stat.getStatsDesc().getPartName(), stat.getStatsObj());
        }
      }
      result = new PartitionsStatsResult(map);
    } finally {
      endFunction("get_partitions_statistics_req", result != null, null, tblName);
    }
    return result;
  }

  @Override
  public boolean update_table_column_statistics(ColumnStatistics colStats) throws TException {
    // Deprecated API, won't work for transactional tables
    return updateTableColumnStatsInternal(colStats, null, -1);
  }

  @Override
  public SetPartitionsStatsResponse update_table_column_statistics_req(
      SetPartitionsStatsRequest req) throws NoSuchObjectException, InvalidObjectException,
      MetaException, InvalidInputException, TException {
    if (req.getColStatsSize() != 1) {
      throw new InvalidInputException("Only one stats object expected");
    }
    if (req.isNeedMerge()) {
      throw new InvalidInputException("Merge is not supported for non-aggregate stats");
    }
    ColumnStatistics colStats = req.getColStatsIterator().next();
    boolean ret = updateTableColumnStatsInternal(colStats,
        req.getValidWriteIdList(), req.getWriteId());
    return new SetPartitionsStatsResponse(ret);
  }

  private boolean updateTableColumnStatsInternal(ColumnStatistics colStats,
      String validWriteIds, long writeId)
      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    normalizeColStatsInput(colStats);
    startFunction("write_column_statistics", ": table=" + TableName.getQualified(
        colStats.getStatsDesc().getCatName(), colStats.getStatsDesc().getDbName(),
        colStats.getStatsDesc().getTableName()));
    Map<String, String> parameters = null;
    getMS().openTransaction();
    boolean committed = false;
    try {
      parameters = getMS().updateTableColumnStatistics(colStats, validWriteIds, writeId);
      if (parameters != null) {
        Table tableObj = getMS().getTable(colStats.getStatsDesc().getCatName(),
            colStats.getStatsDesc().getDbName(),
            colStats.getStatsDesc().getTableName(), validWriteIds);
        if (transactionalListeners != null && !transactionalListeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
              EventType.UPDATE_TABLE_COLUMN_STAT,
              new UpdateTableColumnStatEvent(colStats, tableObj, parameters, writeId, this));
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners,
              EventType.UPDATE_TABLE_COLUMN_STAT,
              new UpdateTableColumnStatEvent(colStats, tableObj, parameters, writeId, this));
        }
      }
      committed = getMS().commitTransaction();
    } finally {
      if (!committed) {
        getMS().rollbackTransaction();
      }
      endFunction("write_column_statistics", parameters != null, null,
          colStats.getStatsDesc().getTableName());
    }
    return parameters != null;
  }

  private void normalizeColStatsInput(ColumnStatistics colStats) throws MetaException {
    // TODO: is this really needed? this code is propagated from HIVE-1362 but most of it is useless.
    ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
    statsDesc.setCatName(statsDesc.isSetCatName() ?
statsDesc.getCatName().toLowerCase() : getDefaultCatalog(conf));
    statsDesc.setDbName(statsDesc.getDbName().toLowerCase());
    statsDesc.setTableName(statsDesc.getTableName().toLowerCase());
    statsDesc.setPartName(lowerCaseConvertPartName(statsDesc.getPartName()));
    long time = System.currentTimeMillis() / 1000;
    statsDesc.setLastAnalyzed(time);
    for (ColumnStatisticsObj statsObj : colStats.getStatsObj()) {
      statsObj.setColName(statsObj.getColName().toLowerCase());
      statsObj.setColType(statsObj.getColType().toLowerCase());
    }
    colStats.setStatsDesc(statsDesc);
  }

  private boolean updatePartitonColStatsInternal(Table tbl, ColumnStatistics colStats,
      String validWriteIds, long writeId)
      throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
    normalizeColStatsInput(colStats);
    ColumnStatisticsDesc csd = colStats.getStatsDesc();
    String catName = csd.getCatName(), dbName = csd.getDbName(), tableName = csd.getTableName();
    startFunction("write_partition_column_statistics", ": db=" + dbName
        + " table=" + tableName + " part=" + csd.getPartName());
    Map<String, String> parameters = null;
    List<String> partVals;
    boolean committed = false;
    getMS().openTransaction();
    try {
      if (tbl == null) {
        tbl = getTable(catName, dbName, tableName);
      }
      partVals = getPartValsFromName(tbl, csd.getPartName());
      parameters = getMS().updatePartitionColumnStatistics(colStats, partVals, validWriteIds, writeId);
      if (parameters != null) {
        if (transactionalListeners != null && !transactionalListeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
              EventType.UPDATE_PARTITION_COLUMN_STAT,
              new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, this));
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners,
              EventType.UPDATE_PARTITION_COLUMN_STAT,
              new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, this));
        }
      }
      committed = getMS().commitTransaction();
    } finally {
      if (!committed) {
        getMS().rollbackTransaction();
      }
      endFunction("write_partition_column_statistics", parameters != null, null, tableName);
    }
    return parameters != null;
  }

  private void updatePartitionColStatsForOneBatch(Table tbl, Map<String, ColumnStatistics> statsMap,
      String validWriteIds, long writeId)
      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    Map<String, Map<String, String>> result = getMS().updatePartitionColumnStatisticsInBatch(
        statsMap, tbl, transactionalListeners, validWriteIds, writeId);
    if (result != null && result.size() != 0 && listeners != null) {
      // The normal listeners, unlike transactional listeners, do not share the transactions
      // used by the update operations, so there is no need to keep them within the same
      // transaction. If notifying one of the listeners fails, aborting the transaction
      // could not revert the notifications already sent to the other listeners.
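      // Each result entry maps a partition name to the parameters persisted for it,
      // e.g. (hypothetical) "ds=2021-01-01" -> {"COLUMN_STATS_ACCURATE": "..."}; the
      // matching ColumnStatistics is looked up by that name to build the event below.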
for (Map.Entry entry : result.entrySet()) { Map<String, String> parameters = (Map<String, String>) entry.getValue(); ColumnStatistics colStats = statsMap.get(entry.getKey()); List<String> partVals = getPartValsFromName(tbl, colStats.getStatsDesc().getPartName()); MetaStoreListenerNotifier.notifyEvent(listeners, EventMessage.EventType.UPDATE_PARTITION_COLUMN_STAT, new UpdatePartitionColumnStatEvent(colStats, partVals, parameters, tbl, writeId, this)); } } } private boolean updatePartitionColStatsInBatch(Table tbl, Map<String, ColumnStatistics> statsMap, String validWriteIds, long writeId) throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException { if (statsMap.size() == 0) { return false; } String catalogName = tbl.getCatName(); String dbName = tbl.getDbName(); String tableName = tbl.getTableName(); startFunction("updatePartitionColStatsInBatch", ": db=" + dbName + " table=" + tableName); long start = System.currentTimeMillis(); Map<String, ColumnStatistics> newStatsMap = new HashMap<>(); long numStats = 0; long numStatsMax = MetastoreConf.getIntVar(conf, ConfVars.JDBC_MAX_BATCH_SIZE); try { for (Map.Entry entry : statsMap.entrySet()) { ColumnStatistics colStats = (ColumnStatistics) entry.getValue(); normalizeColStatsInput(colStats); assert catalogName.equalsIgnoreCase(colStats.getStatsDesc().getCatName()); assert dbName.equalsIgnoreCase(colStats.getStatsDesc().getDbName()); assert tableName.equalsIgnoreCase(colStats.getStatsDesc().getTableName()); newStatsMap.put((String) entry.getKey(), colStats); numStats += colStats.getStatsObjSize(); if (newStatsMap.size() >= numStatsMax) { updatePartitionColStatsForOneBatch(tbl, newStatsMap, validWriteIds, writeId); newStatsMap.clear(); numStats = 0; } } if (numStats != 0) { updatePartitionColStatsForOneBatch(tbl, newStatsMap, validWriteIds, writeId); } } finally { endFunction("updatePartitionColStatsInBatch", true, null, tableName); long end = System.currentTimeMillis(); float sec = (end - start) / 1000F; LOG.info("updatePartitionColStatsInBatch took " + sec + " seconds for " + statsMap.size() + " stats"); } return true; } @Override public boolean update_partition_column_statistics(ColumnStatistics colStats) throws TException { // Deprecated API. 
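    // Prefer update_partition_column_statistics_req, which carries the write ID and
    // valid write-ID list needed for transactional tables. Illustrative call sequence
    // (variable names hypothetical; setters per the thrift-generated request type):
    //   SetPartitionsStatsRequest req = new SetPartitionsStatsRequest();
    //   req.setColStats(Collections.singletonList(colStats));
    //   req.setWriteId(writeId);
    //   req.setValidWriteIdList(validWriteIdList);
    //   handler.update_partition_column_statistics_req(req);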
return updatePartitonColStatsInternal(null, colStats, null, -1); } @Override public SetPartitionsStatsResponse update_partition_column_statistics_req( SetPartitionsStatsRequest req) throws NoSuchObjectException, InvalidObjectException, MetaException, InvalidInputException, TException { if (req.getColStatsSize() != 1) { throw new InvalidInputException("Only one stats object expected"); } if (req.isNeedMerge()) { throw new InvalidInputException("Merge is not supported for non-aggregate stats"); } ColumnStatistics colStats = req.getColStatsIterator().next(); boolean ret = updatePartitonColStatsInternal(null, colStats, req.getValidWriteIdList(), req.getWriteId()); return new SetPartitionsStatsResponse(ret); } @Override public boolean delete_partition_column_statistics(String dbName, String tableName, String partName, String colName, String engine) throws TException { dbName = dbName.toLowerCase(); String[] parsedDbName = parseDbName(dbName, conf); tableName = tableName.toLowerCase(); if (colName != null) { colName = colName.toLowerCase(); } String convertedPartName = lowerCaseConvertPartName(partName); startFunction("delete_column_statistics_by_partition",": table=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " partition=" + convertedPartName + " column=" + colName); boolean ret = false, committed = false; getMS().openTransaction(); try { List<String> partVals = getPartValsFromName(getMS(), parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName); Table table = getMS().getTable(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); // This API looks unused; if it were used we'd need to update stats state and write ID. // We cannot just randomly nuke some txn stats. if (TxnUtils.isTransactionalTable(table)) { throw new MetaException("Cannot delete stats via this API for a transactional table"); } ret = getMS().deletePartitionColumnStatistics(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName, partVals, colName, engine); if (ret) { if (transactionalListeners != null && !transactionalListeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DELETE_PARTITION_COLUMN_STAT, new DeletePartitionColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName, partVals, colName, engine, this)); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DELETE_PARTITION_COLUMN_STAT, new DeletePartitionColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName, convertedPartName, partVals, colName, engine, this)); } } committed = getMS().commitTransaction(); } finally { if (!committed) { getMS().rollbackTransaction(); } endFunction("delete_column_statistics_by_partition", ret != false, null, tableName); } return ret; } @Override public boolean delete_table_column_statistics(String dbName, String tableName, String colName, String engine) throws TException { dbName = dbName.toLowerCase(); tableName = tableName.toLowerCase(); String[] parsedDbName = parseDbName(dbName, conf); if (colName != null) { colName = colName.toLowerCase(); } startFunction("delete_column_statistics_by_table", ": table=" + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " column=" + colName); boolean ret = false, committed = false; getMS().openTransaction(); try { Table table = getMS().getTable(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName); // This API looks unused; if it were used we'd need to 
// update stats state and write ID. We cannot just randomly nuke some txn stats.
      if (TxnUtils.isTransactionalTable(table)) {
        throw new MetaException("Cannot delete stats via this API for a transactional table");
      }
      ret = getMS().deleteTableColumnStatistics(parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
          tableName, colName, engine);
      if (ret) {
        if (transactionalListeners != null && !transactionalListeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
              EventType.DELETE_TABLE_COLUMN_STAT,
              new DeleteTableColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
                  tableName, colName, engine, this));
        }
        if (!listeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(listeners,
              EventType.DELETE_TABLE_COLUMN_STAT,
              new DeleteTableColumnStatEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
                  tableName, colName, engine, this));
        }
      }
      committed = getMS().commitTransaction();
    } finally {
      if (!committed) {
        getMS().rollbackTransaction();
      }
      endFunction("delete_column_statistics_by_table", ret, null, tableName);
    }
    return ret;
  }

  @Override
  @Deprecated
  public List<Partition> get_partitions_by_filter(final String dbName, final String tblName,
      final String filter, final short maxParts) throws TException {
    String[] parsedDbName = parseDbName(dbName, conf);
    startTableFunction("get_partitions_by_filter", parsedDbName[CAT_NAME],
        parsedDbName[DB_NAME], tblName);
    fireReadTablePreEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName);
    List<Partition> ret = null;
    Exception ex = null;
    try {
      checkLimitNumberOfPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
          tblName, filter, maxParts);
      authorizeTableForPartitionMetadata(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName);
      ret = getMS().getPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME],
          tblName, filter, maxParts);
      ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret);
    } catch (Exception e) {
      ex = e;
      rethrowException(e);
    } finally {
      endFunction("get_partitions_by_filter", ret != null, ex, tblName);
    }
    return ret;
  }

  @Override
  @Deprecated
  public List<PartitionSpec> get_part_specs_by_filter(final String dbName, final String tblName,
      final String filter, final int maxParts) throws TException {
    String[] parsedDbName = parseDbName(dbName, conf);
    startTableFunction("get_partitions_by_filter_pspec", parsedDbName[CAT_NAME],
        parsedDbName[DB_NAME], tblName);
    List<PartitionSpec> partitionSpecs = null;
    try {
      Table table = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName);
      // Don't pass the parsed db name, as get_partitions_by_filter will parse it itself
      List<Partition> partitions = get_partitions_by_filter(dbName, tblName, filter, (short) maxParts);
      if (is_partition_spec_grouping_enabled(table)) {
        partitionSpecs = MetaStoreServerUtils
            .getPartitionspecsGroupedByStorageDescriptor(table, partitions);
      } else {
        PartitionSpec pSpec = new PartitionSpec();
        pSpec.setPartitionList(new PartitionListComposingSpec(partitions));
        pSpec.setRootPath(table.getSd().getLocation());
        pSpec.setCatName(parsedDbName[CAT_NAME]);
        pSpec.setDbName(parsedDbName[DB_NAME]);
        pSpec.setTableName(tblName);
        partitionSpecs = Arrays.asList(pSpec);
      }
      return partitionSpecs;
    } finally {
      endFunction("get_partitions_by_filter_pspec",
          partitionSpecs != null && !partitionSpecs.isEmpty(), null, tblName);
    }
  }

  @Override
  public PartitionsSpecByExprResult get_partitions_spec_by_expr(
      PartitionsByExprRequest req) throws TException {
    String dbName = req.getDbName(), tblName = req.getTblName();
    String
catName = req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf); startTableFunction("get_partitions_spec_by_expr", catName, dbName, tblName); fireReadTablePreEvent(catName, dbName, tblName); PartitionsSpecByExprResult ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByExpr(catName, dbName, tblName, req.getExpr(), UNLIMITED_MAX_PARTITIONS); List<Partition> partitions = new LinkedList<>(); boolean hasUnknownPartitions = getMS().getPartitionsByExpr(catName, dbName, tblName, req.getExpr(), req.getDefaultPartitionName(), req.getMaxParts(), partitions); Table table = get_table_core(catName, dbName, tblName); List<PartitionSpec> partitionSpecs = MetaStoreServerUtils.getPartitionspecsGroupedByStorageDescriptor(table, partitions); ret = new PartitionsSpecByExprResult(partitionSpecs, hasUnknownPartitions); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_spec_by_expr", ret != null, ex, tblName); } return ret; } @Override public PartitionsByExprResult get_partitions_by_expr( PartitionsByExprRequest req) throws TException { String dbName = req.getDbName(), tblName = req.getTblName(); String catName = req.isSetCatName() ? req.getCatName() : getDefaultCatalog(conf); startTableFunction("get_partitions_by_expr", catName, dbName, tblName); fireReadTablePreEvent(catName, dbName, tblName); PartitionsByExprResult ret = null; Exception ex = null; try { checkLimitNumberOfPartitionsByExpr(catName, dbName, tblName, req.getExpr(), UNLIMITED_MAX_PARTITIONS); List<Partition> partitions = new LinkedList<>(); boolean hasUnknownPartitions = getMS().getPartitionsByExpr(catName, dbName, tblName, req.getExpr(), req.getDefaultPartitionName(), req.getMaxParts(), partitions); ret = new PartitionsByExprResult(partitions, hasUnknownPartitions); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_partitions_by_expr", ret != null, ex, tblName); } return ret; } @Override @Deprecated public int get_num_partitions_by_filter(final String dbName, final String tblName, final String filter) throws TException { String[] parsedDbName = parseDbName(dbName, conf); if (parsedDbName[DB_NAME] == null || tblName == null) { throw new MetaException("The DB and table name cannot be null."); } startTableFunction("get_num_partitions_by_filter", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName); int ret = -1; Exception ex = null; try { ret = getMS().getNumPartitionsByFilter(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tblName, filter); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_num_partitions_by_filter", ret != -1, ex, tblName); } return ret; } private int get_num_partitions_by_expr(final String catName, final String dbName, final String tblName, final byte[] expr) throws TException { int ret = -1; Exception ex = null; try { ret = getMS().getNumPartitionsByExpr(catName, dbName, tblName, expr); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_num_partitions_by_expr", ret != -1, ex, tblName); } return ret; } @Override @Deprecated public List<Partition> get_partitions_by_names(final String dbName, final String tblName, final List<String> partNames) throws TException { return get_partitions_by_names(dbName, tblName, partNames, false, null, null); } @Override public GetPartitionsByNamesResult get_partitions_by_names_req(GetPartitionsByNamesRequest gpbnr) throws TException { List<Partition> partitions = get_partitions_by_names(gpbnr.getDb_name(), gpbnr.getTbl_name(), 
gpbnr.getNames(), gpbnr.isSetGet_col_stats() && gpbnr.isGet_col_stats(), gpbnr.getEngine(), gpbnr.getProcessorCapabilities(), gpbnr.getProcessorIdentifier()); return new GetPartitionsByNamesResult(partitions); } public List<Partition> get_partitions_by_names(final String dbName, final String tblName, final List<String> partNames, boolean getColStats, String engine, String validWriteIdList) throws TException { return get_partitions_by_names( dbName, tblName, partNames, getColStats, engine, null, null); } public List<Partition> get_partitions_by_names(final String dbName, final String tblName, final List<String> partNames, boolean getColStats, String engine, List<String> processorCapabilities, String processorId) throws TException { String[] dbNameParts = parseDbName(dbName, conf); String parsedCatName = dbNameParts[CAT_NAME]; String parsedDbName = dbNameParts[DB_NAME]; List<Partition> ret = null; Table table = null; Exception ex = null; boolean success = false; startTableFunction("get_partitions_by_names", parsedCatName, parsedDbName, tblName); try { getMS().openTransaction(); authorizeTableForPartitionMetadata(parsedCatName, parsedDbName, tblName); fireReadTablePreEvent(parsedCatName, parsedDbName, tblName); ret = getMS().getPartitionsByNames(parsedCatName, parsedDbName, tblName, partNames); ret = FilterUtils.filterPartitionsIfEnabled(isServerFilterEnabled, filterHook, ret); table = getTable(parsedCatName, parsedDbName, tblName); // If requested add column statistics in each of the partition objects if (getColStats) { // Since each partition may have stats collected for different set of columns, we // request them separately. for (Partition part: ret) { String partName = Warehouse.makePartName(table.getPartitionKeys(), part.getValues()); List<ColumnStatistics> partColStatsList = getMS().getPartitionColumnStatistics(parsedCatName, parsedDbName, tblName, Collections.singletonList(partName), StatsSetupConst.getColumnsHavingStats(part.getParameters()), engine); if (partColStatsList != null && !partColStatsList.isEmpty()) { ColumnStatistics partColStats = partColStatsList.get(0); if (partColStats != null) { part.setColStats(partColStats); } } } } if (processorCapabilities == null || processorCapabilities.size() == 0 || processorCapabilities.contains("MANAGERAWMETADATA")) { LOG.info("Skipping translation for processor with " + processorId); } else { if (transformer != null) { ret = transformer.transformPartitions(ret, table, processorCapabilities, processorId); } } success = getMS().commitTransaction(); } catch (Exception e) { ex = e; rethrowException(e); } finally { if (!success) { getMS().rollbackTransaction(); } endFunction("get_partitions_by_names", ret != null, ex, tblName); } return ret; } @Override public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String userName, List<String> groupNames) throws TException { firePreEvent(new PreAuthorizationCallEvent(this)); String catName = hiveObject.isSetCatName() ? 
hiveObject.getCatName() : getDefaultCatalog(conf); HiveObjectType debug = hiveObject.getObjectType(); if (hiveObject.getObjectType() == HiveObjectType.COLUMN) { String partName = getPartName(hiveObject); return this.get_column_privilege_set(catName, hiveObject.getDbName(), hiveObject .getObjectName(), partName, hiveObject.getColumnName(), userName, groupNames); } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) { String partName = getPartName(hiveObject); return this.get_partition_privilege_set(catName, hiveObject.getDbName(), hiveObject.getObjectName(), partName, userName, groupNames); } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) { return this.get_db_privilege_set(catName, hiveObject.getDbName(), userName, groupNames); } else if (hiveObject.getObjectType() == HiveObjectType.DATACONNECTOR) { return this.get_connector_privilege_set(catName, hiveObject.getObjectName(), userName, groupNames); } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) { return this.get_table_privilege_set(catName, hiveObject.getDbName(), hiveObject .getObjectName(), userName, groupNames); } else if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) { return this.get_user_privilege_set(userName, groupNames); } return null; } private String getPartName(HiveObjectRef hiveObject) throws MetaException { String partName = null; List<String> partValue = hiveObject.getPartValues(); if (partValue != null && partValue.size() > 0) { try { String catName = hiveObject.isSetCatName() ? hiveObject.getCatName() : getDefaultCatalog(conf); Table table = get_table_core(catName, hiveObject.getDbName(), hiveObject .getObjectName()); partName = Warehouse .makePartName(table.getPartitionKeys(), partValue); } catch (NoSuchObjectException e) { throw new MetaException(e.getMessage()); } } return partName; } private PrincipalPrivilegeSet get_column_privilege_set(String catName, final String dbName, final String tableName, final String partName, final String columnName, final String userName, final List<String> groupNames) throws TException { incrementCounter("get_column_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getColumnPrivilegeSet( catName, dbName, tableName, partName, columnName, userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } private PrincipalPrivilegeSet get_db_privilege_set(String catName, final String dbName, final String userName, final List<String> groupNames) throws TException { incrementCounter("get_db_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getDBPrivilegeSet(catName, dbName, userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } private PrincipalPrivilegeSet get_connector_privilege_set(String catName, final String connectorName, final String userName, final List<String> groupNames) throws TException { incrementCounter("get_connector_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getConnectorPrivilegeSet(catName, connectorName, userName, groupNames); } catch (MetaException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } return ret; } private PrincipalPrivilegeSet get_partition_privilege_set( String catName, final String dbName, final String tableName, final String partName, final String userName, final List<String> groupNames) throws TException { incrementCounter("get_partition_privilege_set"); 
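    // Same pattern as the other *_privilege_set helpers below: delegate to the
    // RawStore, let MetaException propagate unchanged, and wrap anything else
    // as an unchecked exception.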
PrincipalPrivilegeSet ret; try { ret = getMS().getPartitionPrivilegeSet(catName, dbName, tableName, partName, userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } private PrincipalPrivilegeSet get_table_privilege_set(String catName, final String dbName, final String tableName, final String userName, final List<String> groupNames) throws TException { incrementCounter("get_table_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getTablePrivilegeSet(catName, dbName, tableName, userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } @Override public boolean grant_role(final String roleName, final String principalName, final PrincipalType principalType, final String grantor, final PrincipalType grantorType, final boolean grantOption) throws TException { incrementCounter("add_role_member"); firePreEvent(new PreAuthorizationCallEvent(this)); if (PUBLIC.equals(roleName)) { throw new MetaException("No user can be added to " + PUBLIC +". Since all users implicitly" + " belong to " + PUBLIC + " role."); } Boolean ret; try { RawStore ms = getMS(); Role role = ms.getRole(roleName); if(principalType == PrincipalType.ROLE){ //check if this grant statement will end up creating a cycle if(isNewRoleAParent(principalName, roleName)){ throw new MetaException("Cannot grant role " + principalName + " to " + roleName + " as " + roleName + " already belongs to the role " + principalName + ". (no cycles allowed)"); } } ret = ms.grantRole(role, principalName, principalType, grantor, grantorType, grantOption); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } /** * Check if newRole is in parent hierarchy of curRole * @param newRole * @param curRole * @return true if newRole is curRole or present in its hierarchy * @throws MetaException */ private boolean isNewRoleAParent(String newRole, String curRole) throws MetaException { if(newRole.equals(curRole)){ return true; } //do this check recursively on all the parent roles of curRole List<Role> parentRoleMaps = getMS().listRoles(curRole, PrincipalType.ROLE); for(Role parentRole : parentRoleMaps){ if(isNewRoleAParent(newRole, parentRole.getRoleName())){ return true; } } return false; } @Override public List<Role> list_roles(final String principalName, final PrincipalType principalType) throws TException { incrementCounter("list_roles"); firePreEvent(new PreAuthorizationCallEvent(this)); return getMS().listRoles(principalName, principalType); } @Override public boolean create_role(final Role role) throws TException { incrementCounter("create_role"); firePreEvent(new PreAuthorizationCallEvent(this)); if (PUBLIC.equals(role.getRoleName())) { throw new MetaException(PUBLIC + " role implicitly exists. 
It can't be created."); } Boolean ret; try { ret = getMS().addRole(role.getRoleName(), role.getOwnerName()); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public boolean drop_role(final String roleName) throws TException { incrementCounter("drop_role"); firePreEvent(new PreAuthorizationCallEvent(this)); if (ADMIN.equals(roleName) || PUBLIC.equals(roleName)) { throw new MetaException(PUBLIC + "," + ADMIN + " roles can't be dropped."); } Boolean ret; try { ret = getMS().removeRole(roleName); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public List<String> get_role_names() throws TException { incrementCounter("get_role_names"); firePreEvent(new PreAuthorizationCallEvent(this)); List<String> ret; try { ret = getMS().listRoleNames(); return ret; } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } @Override public boolean grant_privileges(final PrivilegeBag privileges) throws TException { incrementCounter("grant_privileges"); firePreEvent(new PreAuthorizationCallEvent(this)); Boolean ret; try { ret = getMS().grantPrivileges(privileges); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public boolean revoke_role(final String roleName, final String userName, final PrincipalType principalType) throws TException { return revoke_role(roleName, userName, principalType, false); } private boolean revoke_role(final String roleName, final String userName, final PrincipalType principalType, boolean grantOption) throws TException { incrementCounter("remove_role_member"); firePreEvent(new PreAuthorizationCallEvent(this)); if (PUBLIC.equals(roleName)) { throw new MetaException(PUBLIC + " role can't be revoked."); } Boolean ret; try { RawStore ms = getMS(); Role mRole = ms.getRole(roleName); ret = ms.revokeRole(mRole, userName, principalType, grantOption); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, NoSuchObjectException.class) .defaultTException(); } return ret; } @Override public GrantRevokeRoleResponse grant_revoke_role(GrantRevokeRoleRequest request) throws TException { GrantRevokeRoleResponse response = new GrantRevokeRoleResponse(); boolean grantOption = false; if (request.isSetGrantOption()) { grantOption = request.isGrantOption(); } switch (request.getRequestType()) { case GRANT: { boolean result = grant_role(request.getRoleName(), request.getPrincipalName(), request.getPrincipalType(), request.getGrantor(), request.getGrantorType(), grantOption); response.setSuccess(result); break; } case REVOKE: { boolean result = 
revoke_role(request.getRoleName(), request.getPrincipalName(), request.getPrincipalType(), grantOption); response.setSuccess(result); break; } default: throw new MetaException("Unknown request type " + request.getRequestType()); } return response; } @Override public GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request) throws TException { GrantRevokePrivilegeResponse response = new GrantRevokePrivilegeResponse(); switch (request.getRequestType()) { case GRANT: { boolean result = grant_privileges(request.getPrivileges()); response.setSuccess(result); break; } case REVOKE: { boolean revokeGrantOption = false; if (request.isSetRevokeGrantOption()) { revokeGrantOption = request.isRevokeGrantOption(); } boolean result = revoke_privileges(request.getPrivileges(), revokeGrantOption); response.setSuccess(result); break; } default: throw new MetaException("Unknown request type " + request.getRequestType()); } return response; } @Override public GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef objToRefresh, String authorizer, GrantRevokePrivilegeRequest grantRequest) throws TException { incrementCounter("refresh_privileges"); firePreEvent(new PreAuthorizationCallEvent(this)); GrantRevokePrivilegeResponse response = new GrantRevokePrivilegeResponse(); try { boolean result = getMS().refreshPrivileges(objToRefresh, authorizer, grantRequest.getPrivileges()); response.setSuccess(result); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return response; } @Override public boolean revoke_privileges(final PrivilegeBag privileges) throws TException { return revoke_privileges(privileges, false); } public boolean revoke_privileges(final PrivilegeBag privileges, boolean grantOption) throws TException { incrementCounter("revoke_privileges"); firePreEvent(new PreAuthorizationCallEvent(this)); Boolean ret; try { ret = getMS().revokePrivileges(privileges, grantOption); } catch (Exception e) { String exInfo = "Got exception: " + e.getClass().getName() + " " + e.getMessage(); LOG.error(exInfo, e); throw handleException(e).throwIfInstance(MetaException.class) .toMetaExceptionIfInstance(exInfo, InvalidObjectException.class, NoSuchObjectException.class) .defaultTException(); } return ret; } private PrincipalPrivilegeSet get_user_privilege_set(final String userName, final List<String> groupNames) throws TException { incrementCounter("get_user_privilege_set"); PrincipalPrivilegeSet ret; try { ret = getMS().getUserPrivilegeSet(userName, groupNames); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } return ret; } @Override public List<HiveObjectPrivilege> list_privileges(String principalName, PrincipalType principalType, HiveObjectRef hiveObject) throws TException { firePreEvent(new PreAuthorizationCallEvent(this)); String catName = hiveObject.isSetCatName() ? 
hiveObject.getCatName() : getDefaultCatalog(conf); if (hiveObject.getObjectType() == null) { return getAllPrivileges(principalName, principalType, catName); } if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) { return list_global_privileges(principalName, principalType); } if (hiveObject.getObjectType() == HiveObjectType.DATABASE) { return list_db_privileges(principalName, principalType, catName, hiveObject .getDbName()); } if (hiveObject.getObjectType() == HiveObjectType.DATACONNECTOR) { return list_dc_privileges(principalName, principalType, hiveObject .getObjectName()); } if (hiveObject.getObjectType() == HiveObjectType.TABLE) { return list_table_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName()); } if (hiveObject.getObjectType() == HiveObjectType.PARTITION) { return list_partition_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject .getPartValues()); } if (hiveObject.getObjectType() == HiveObjectType.COLUMN) { if (hiveObject.getPartValues() == null || hiveObject.getPartValues().isEmpty()) { return list_table_column_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getColumnName()); } return list_partition_column_privileges(principalName, principalType, catName, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject .getPartValues(), hiveObject.getColumnName()); } return null; } private List<HiveObjectPrivilege> getAllPrivileges(String principalName, PrincipalType principalType, String catName) throws TException { List<HiveObjectPrivilege> privs = new ArrayList<>(); privs.addAll(list_global_privileges(principalName, principalType)); privs.addAll(list_db_privileges(principalName, principalType, catName, null)); privs.addAll(list_dc_privileges(principalName, principalType, null)); privs.addAll(list_table_privileges(principalName, principalType, catName, null, null)); privs.addAll(list_partition_privileges(principalName, principalType, catName, null, null, null)); privs.addAll(list_table_column_privileges(principalName, principalType, catName, null, null, null)); privs.addAll(list_partition_column_privileges(principalName, principalType, catName, null, null, null, null)); return privs; } private List<HiveObjectPrivilege> list_table_column_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName, final String columnName) throws TException { incrementCounter("list_table_column_privileges"); try { if (dbName == null) { return getMS().listPrincipalTableColumnGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listTableColumnGrantsAll(catName, dbName, tableName, columnName); } return getMS().listPrincipalTableColumnGrants(principalName, principalType, catName, dbName, tableName, columnName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_partition_column_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName, final List<String> partValues, final String columnName) throws TException { incrementCounter("list_partition_column_privileges"); try { if (dbName == null) { return getMS().listPrincipalPartitionColumnGrantsAll(principalName, principalType); } Table tbl = get_table_core(catName, dbName, tableName); String 
partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); if (principalName == null) { return getMS().listPartitionColumnGrantsAll(catName, dbName, tableName, partName, columnName); } return getMS().listPrincipalPartitionColumnGrants(principalName, principalType, catName, dbName, tableName, partValues, partName, columnName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_db_privileges(final String principalName, final PrincipalType principalType, String catName, final String dbName) throws TException { incrementCounter("list_security_db_grant"); try { if (dbName == null) { return getMS().listPrincipalDBGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listDBGrantsAll(catName, dbName); } else { return getMS().listPrincipalDBGrants(principalName, principalType, catName, dbName); } } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_dc_privileges(final String principalName, final PrincipalType principalType, final String dcName) throws TException { incrementCounter("list_security_dc_grant"); try { if (dcName == null) { return getMS().listPrincipalDCGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listDCGrantsAll(dcName); } else { return getMS().listPrincipalDCGrants(principalName, principalType, dcName); } } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_partition_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName, final List<String> partValues) throws TException { incrementCounter("list_security_partition_grant"); try { if (dbName == null) { return getMS().listPrincipalPartitionGrantsAll(principalName, principalType); } Table tbl = get_table_core(catName, dbName, tableName); String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); if (principalName == null) { return getMS().listPartitionGrantsAll(catName, dbName, tableName, partName); } return getMS().listPrincipalPartitionGrants( principalName, principalType, catName, dbName, tableName, partValues, partName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_table_privileges( final String principalName, final PrincipalType principalType, String catName, final String dbName, final String tableName) throws TException { incrementCounter("list_security_table_grant"); try { if (dbName == null) { return getMS().listPrincipalTableGrantsAll(principalName, principalType); } if (principalName == null) { return getMS().listTableGrantsAll(catName, dbName, tableName); } return getMS().listAllTableGrants(principalName, principalType, catName, dbName, tableName); } catch (Exception e) { throw handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } private List<HiveObjectPrivilege> list_global_privileges( final String principalName, final PrincipalType principalType) throws TException { incrementCounter("list_security_user_grant"); try { if (principalName == null) { return getMS().listGlobalGrantsAll(); } return getMS().listPrincipalGlobalGrants(principalName, principalType); } catch (Exception e) { throw 
handleException(e).throwIfInstance(MetaException.class).defaultRuntimeException(); } } @Override public void cancel_delegation_token(String token_str_form) throws TException { startFunction("cancel_delegation_token"); boolean success = false; Exception ex = null; try { HiveMetaStore.cancelDelegationToken(token_str_form); success = true; } catch (Exception e) { ex = e; throw handleException(e).convertIfInstance(IOException.class, MetaException.class).defaultMetaException(); } finally { endFunction("cancel_delegation_token", success, ex); } } @Override public long renew_delegation_token(String token_str_form) throws TException { startFunction("renew_delegation_token"); Long ret = null; Exception ex = null; try { ret = HiveMetaStore.renewDelegationToken(token_str_form); } catch (Exception e) { ex = e; throw handleException(e).convertIfInstance(IOException.class, MetaException.class).defaultMetaException(); } finally { endFunction("renew_delegation_token", ret != null, ex); } return ret; } @Override public String get_delegation_token(String token_owner, String renewer_kerberos_principal_name) throws TException { startFunction("get_delegation_token"); String ret = null; Exception ex = null; try { ret = HiveMetaStore.getDelegationToken(token_owner, renewer_kerberos_principal_name, getIPAddress()); } catch (Exception e) { ex = e; throw handleException(e).convertIfInstance(IOException.class, MetaException.class) .convertIfInstance(InterruptedException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("get_delegation_token", ret != null, ex); } return ret; } @Override public boolean add_token(String token_identifier, String delegation_token) throws TException { startFunction("add_token", ": " + token_identifier); boolean ret = false; Exception ex = null; try { ret = getMS().addToken(token_identifier, delegation_token); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("add_token", ret == true, ex); } return ret; } @Override public boolean remove_token(String token_identifier) throws TException { startFunction("remove_token", ": " + token_identifier); boolean ret = false; Exception ex = null; try { ret = getMS().removeToken(token_identifier); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("remove_token", ret == true, ex); } return ret; } @Override public String get_token(String token_identifier) throws TException { startFunction("get_token for", ": " + token_identifier); String ret = null; Exception ex = null; try { ret = getMS().getToken(token_identifier); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_token", ret != null, ex); } //Thrift cannot return null result return ret == null ? 
"" : ret; } @Override public List<String> get_all_token_identifiers() throws TException { startFunction("get_all_token_identifiers."); List<String> ret; Exception ex = null; try { ret = getMS().getAllTokenIdentifiers(); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_token_identifiers.", ex == null, ex); } return ret; } @Override public int add_master_key(String key) throws TException { startFunction("add_master_key."); int ret; Exception ex = null; try { ret = getMS().addMasterKey(key); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("add_master_key.", ex == null, ex); } return ret; } @Override public void update_master_key(int seq_number, String key) throws TException { startFunction("update_master_key."); Exception ex = null; try { getMS().updateMasterKey(seq_number, key); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("update_master_key.", ex == null, ex); } } @Override public boolean remove_master_key(int key_seq) throws TException { startFunction("remove_master_key."); Exception ex = null; boolean ret; try { ret = getMS().removeMasterKey(key_seq); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("remove_master_key.", ex == null, ex); } return ret; } @Override public List<String> get_master_keys() throws TException { startFunction("get_master_keys."); Exception ex = null; String [] ret = null; try { ret = getMS().getMasterKeys(); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_master_keys.", ret != null, ex); } return Arrays.asList(ret); } @Override public void markPartitionForEvent(final String db_name, final String tbl_name, final Map<String, String> partName, final PartitionEventType evtType) throws TException { Table tbl = null; Exception ex = null; RawStore ms = getMS(); boolean success = false; try { String[] parsedDbName = parseDbName(db_name, conf); ms.openTransaction(); startPartitionFunction("markPartitionForEvent", parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName); firePreEvent(new PreLoadPartitionDoneEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName, this)); tbl = ms.markPartitionForEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName, evtType); if (null == tbl) { throw new UnknownTableException("Table: " + tbl_name + " not found."); } if (transactionalListeners.size() > 0) { LoadPartitionDoneEvent lpde = new LoadPartitionDoneEvent(true, tbl, partName, this); for (MetaStoreEventListener transactionalListener : transactionalListeners) { transactionalListener.onLoadPartitionDone(lpde); } } success = ms.commitTransaction(); for (MetaStoreEventListener listener : listeners) { listener.onLoadPartitionDone(new LoadPartitionDoneEvent(true, tbl, partName, this)); } } catch (Exception original) { ex = original; LOG.error("Exception caught in mark partition event ", original); throw handleException(original) .throwIfInstance(UnknownTableException.class, InvalidPartitionException.class, MetaException.class) .defaultMetaException(); } finally { if (!success) { ms.rollbackTransaction(); } endFunction("markPartitionForEvent", tbl != null, ex, tbl_name); } } @Override public boolean isPartitionMarkedForEvent(final String db_name, final String tbl_name, final Map<String, String> partName, final PartitionEventType evtType) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startPartitionFunction("isPartitionMarkedForEvent", 
parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName); Boolean ret = null; Exception ex = null; try { ret = getMS().isPartitionMarkedForEvent(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name, partName, evtType); } catch (Exception original) { LOG.error("Exception caught for isPartitionMarkedForEvent ", original); ex = original; throw handleException(original).throwIfInstance(UnknownTableException.class, InvalidPartitionException.class) .throwIfInstance(UnknownPartitionException.class, MetaException.class) .defaultMetaException(); } finally { endFunction("isPartitionMarkedForEvent", ret != null, ex, tbl_name); } return ret; } @Override public List<String> set_ugi(String username, List<String> groupNames) throws TException { Collections.addAll(groupNames, username); return groupNames; } @Override public boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception) throws TException { startFunction("partition_name_has_valid_characters"); boolean ret; Exception ex = null; try { if (throw_exception) { MetaStoreServerUtils.validatePartitionNameCharacters(part_vals, partitionValidationPattern); ret = true; } else { ret = MetaStoreServerUtils.partitionNameHasValidCharacters(part_vals, partitionValidationPattern); } } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("partition_name_has_valid_characters", true, ex); } return ret; } private void validateFunctionInfo(Function func) throws InvalidObjectException, MetaException { if (func == null) { throw new MetaException("Function cannot be null."); } if (func.getFunctionName() == null) { throw new MetaException("Function name cannot be null."); } if (func.getDbName() == null) { throw new MetaException("Database name in Function cannot be null."); } if (!MetaStoreUtils.validateName(func.getFunctionName(), null)) { throw new InvalidObjectException(func.getFunctionName() + " is not a valid object name"); } String className = func.getClassName(); if (className == null) { throw new InvalidObjectException("Function class name cannot be null"); } if (func.getOwnerType() == null) { throw new MetaException("Function owner type cannot be null."); } if (func.getFunctionType() == null) { throw new MetaException("Function type cannot be null."); } } @Override public void create_function(Function func) throws TException { validateFunctionInfo(func); boolean success = false; RawStore ms = getMS(); Map<String, String> transactionalListenerResponses = Collections.emptyMap(); try { String catName = func.isSetCatName() ? 
func.getCatName() : getDefaultCatalog(conf); if (!func.isSetOwnerName()) { try { func.setOwnerName(SecurityUtils.getUGI().getShortUserName()); } catch (Exception ex) { LOG.error("Cannot obtain username from the session to create a function", ex); throw new TException(ex); } } ms.openTransaction(); Database db = ms.getDatabase(catName, func.getDbName()); if (db == null) { throw new NoSuchObjectException("The database " + func.getDbName() + " does not exist"); } if (db.getType() == DatabaseType.REMOTE) { throw new MetaException("Operation create_function not supported for REMOTE database"); } Function existingFunc = ms.getFunction(catName, func.getDbName(), func.getFunctionName()); if (existingFunc != null) { throw new AlreadyExistsException( "Function " + func.getFunctionName() + " already exists"); } firePreEvent(new PreCreateFunctionEvent(func, this)); long time = System.currentTimeMillis() / 1000; func.setCreateTime((int) time); ms.createFunction(func); if (!transactionalListeners.isEmpty()) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_FUNCTION, new CreateFunctionEvent(func, true, this)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_FUNCTION, new CreateFunctionEvent(func, success, this), null, transactionalListenerResponses, ms); } } } @Override public void drop_function(String dbName, String funcName) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { if (funcName == null) { throw new MetaException("Function name cannot be null."); } boolean success = false; Function func = null; RawStore ms = getMS(); Map<String, String> transactionalListenerResponses = Collections.emptyMap(); String[] parsedDbName = parseDbName(dbName, conf); if (parsedDbName[DB_NAME] == null) { throw new MetaException("Database name cannot be null."); } try { ms.openTransaction(); func = ms.getFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName); if (func == null) { throw new NoSuchObjectException("Function " + funcName + " does not exist"); } Boolean needsCm = ReplChangeManager.isSourceOfReplication(get_database_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME])); // if the copy of the jar to change management fails, we fail the metastore transaction, since the // user might delete the jars on HDFS externally after dropping the function, hence having // a copy is required to allow incremental replication to work correctly. if (func.getResourceUris() != null && !func.getResourceUris().isEmpty()) { for (ResourceUri uri : func.getResourceUris()) { if (uri.getUri().toLowerCase().startsWith("hdfs:") && needsCm) { wh.addToChangeManagement(new Path(uri.getUri())); } } } firePreEvent(new PreDropFunctionEvent(func, this)); // if the operation on the metastore fails, we don't do anything in change management, but fail // the metastore transaction, as having a copy of the jar in change management is not going // to cause any problem; the cleaner thread will remove it when the jar expires.
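// The actual metadata drop below runs inside the transaction opened above, so the change-management
// copies queued earlier only matter if this commit succeeds. A minimal client-side sketch (an
// illustration, not part of this source; assumes an already-connected IMetaStoreClient named `client`):
//   client.dropFunction("mydb", "my_udf");  // surfaces NoSuchObjectException when the function is absent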
ms.dropFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName); if (transactionalListeners.size() > 0) { transactionalListenerResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_FUNCTION, new DropFunctionEvent(func, true, this)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (listeners.size() > 0) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_FUNCTION, new DropFunctionEvent(func, success, this), null, transactionalListenerResponses, ms); } } } @Override public void alter_function(String dbName, String funcName, Function newFunc) throws TException { String[] parsedDbName = parseDbName(dbName, conf); validateForAlterFunction(parsedDbName[DB_NAME], funcName, newFunc); boolean success = false; RawStore ms = getMS(); try { firePreEvent(new PreCreateFunctionEvent(newFunc, this)); ms.openTransaction(); ms.alterFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName, newFunc); success = ms.commitTransaction(); } catch (InvalidObjectException e) { // Throwing MetaException instead of InvalidObjectException as the InvalidObjectException // is not defined for the alter_function method in the Thrift interface. throwMetaException(e); } finally { if (!success) { ms.rollbackTransaction(); } } } private void validateForAlterFunction(String dbName, String funcName, Function newFunc) throws MetaException { if (dbName == null || funcName == null) { throw new MetaException("Database and function name cannot be null."); } try { validateFunctionInfo(newFunc); } catch (InvalidObjectException e) { // The validateFunctionInfo method is used by the create and alter function methods as well // and it can throw InvalidObjectException. But the InvalidObjectException is not defined // for the alter_function method in the Thrift interface, therefore a TApplicationException // will occur at the caller side. Re-throwing the InvalidObjectException as MetaException // would eliminate the TApplicationException at the caller side. throw newMetaException(e); } } @Override public List<String> get_functions(String dbName, String pattern) throws MetaException { startFunction("get_functions", ": db=" + dbName + " pat=" + pattern); RawStore ms = getMS(); Exception ex = null; List<String> funcNames = null; String[] parsedDbName = parseDbName(dbName, conf); try { funcNames = ms.getFunctions(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_functions", funcNames != null, ex); } return funcNames; } @Override public GetAllFunctionsResponse get_all_functions() throws MetaException { GetAllFunctionsResponse response = new GetAllFunctionsResponse(); startFunction("get_all_functions"); RawStore ms = getMS(); List<Function> allFunctions = null; Exception ex = null; try { // Leaving this as the 'hive' catalog (rather than choosing the default from the // configuration) because all the default UDFs are in that catalog, and I think that's what // people would really want here.
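// DEFAULT_CATALOG_NAME is the built-in "hive" catalog (Warehouse.DEFAULT_CATALOG_NAME), so this
// listing deliberately bypasses any catalog default configured via MetastoreConf.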
allFunctions = ms.getAllFunctions(DEFAULT_CATALOG_NAME); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_all_functions", allFunctions != null, ex); } response.setFunctions(allFunctions); return response; } @Override public Function get_function(String dbName, String funcName) throws TException { if (dbName == null || funcName == null) { throw new MetaException("Database and function name cannot be null."); } startFunction("get_function", ": " + dbName + "." + funcName); RawStore ms = getMS(); Function func = null; Exception ex = null; String[] parsedDbName = parseDbName(dbName, conf); try { func = ms.getFunction(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], funcName); if (func == null) { throw new NoSuchObjectException( "Function " + dbName + "." + funcName + " does not exist"); } } catch (Exception e) { ex = e; throw handleException(e).throwIfInstance(NoSuchObjectException.class).defaultMetaException(); } finally { endFunction("get_function", func != null, ex); } return func; } // Transaction and locking methods @Override public GetOpenTxnsResponse get_open_txns() throws TException { return getTxnHandler().getOpenTxns(); } @Override public GetOpenTxnsResponse get_open_txns_req(GetOpenTxnsRequest getOpenTxnsRequest) throws TException { return getTxnHandler().getOpenTxns(getOpenTxnsRequest.getExcludeTxnTypes()); } // Transaction and locking methods @Override public GetOpenTxnsInfoResponse get_open_txns_info() throws TException { return getTxnHandler().getOpenTxnsInfo(); } @Override public OpenTxnsResponse open_txns(OpenTxnRequest rqst) throws TException { OpenTxnsResponse response = getTxnHandler().openTxns(rqst); List<Long> txnIds = response.getTxn_ids(); boolean isHiveReplTxn = rqst.isSetReplPolicy() && TxnType.DEFAULT.equals(rqst.getTxn_type()); if (txnIds != null && listeners != null && !listeners.isEmpty() && !isHiveReplTxn) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.OPEN_TXN, new OpenTxnEvent(txnIds, this)); } return response; } @Override public void abort_txn(AbortTxnRequest rqst) throws TException { getTxnHandler().abortTxn(rqst); boolean isHiveReplTxn = rqst.isSetReplPolicy() && TxnType.DEFAULT.equals(rqst.getTxn_type()); if (listeners != null && !listeners.isEmpty() && !isHiveReplTxn) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ABORT_TXN, new AbortTxnEvent(rqst.getTxnid(), this)); } } @Override public void abort_txns(AbortTxnsRequest rqst) throws TException { getTxnHandler().abortTxns(rqst); if (listeners != null && !listeners.isEmpty()) { for (Long txnId : rqst.getTxn_ids()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ABORT_TXN, new AbortTxnEvent(txnId, this)); } } } @Override public long get_latest_txnid_in_conflict(long txnId) throws MetaException { return getTxnHandler().getLatestTxnIdInConflict(txnId); } @Override public void commit_txn(CommitTxnRequest rqst) throws TException { boolean isReplayedReplTxn = TxnType.REPL_CREATED.equals(rqst.getTxn_type()); boolean isHiveReplTxn = rqst.isSetReplPolicy() && TxnType.DEFAULT.equals(rqst.getTxn_type()); // in replication flow, the write notification log table will be updated here. 
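// For a replayed replication transaction, the block below (1) maps the source txn id to the target
// txn id, (2) rewrites each file URI from the source warehouse layout onto the target table or
// partition location, and (3) records one write notification per WriteEventInfo before committing.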
if (rqst.isSetWriteEventInfos() && isReplayedReplTxn) { assert (rqst.isSetReplPolicy()); long targetTxnId = getTxnHandler().getTargetTxnId(rqst.getReplPolicy(), rqst.getTxnid()); if (targetTxnId < 0) { //looks like a retry return; } for (WriteEventInfo writeEventInfo : rqst.getWriteEventInfos()) { String[] filesAdded = ReplChangeManager.getListFromSeparatedString(writeEventInfo.getFiles()); List<String> partitionValue = null; Partition ptnObj = null; String root; Table tbl = getTblObject(writeEventInfo.getDatabase(), writeEventInfo.getTable(), null); if (writeEventInfo.getPartition() != null && !writeEventInfo.getPartition().isEmpty()) { partitionValue = Warehouse.getPartValuesFromPartName(writeEventInfo.getPartition()); ptnObj = getPartitionObj(writeEventInfo.getDatabase(), writeEventInfo.getTable(), partitionValue, tbl); root = ptnObj.getSd().getLocation(); } else { root = tbl.getSd().getLocation(); } InsertEventRequestData insertData = new InsertEventRequestData(); insertData.setReplace(true); // The files in the commit txn message during load will have files with path corresponding to source // warehouse. Need to transform them to target warehouse using table or partition object location. for (String file : filesAdded) { String[] decodedPath = ReplChangeManager.decodeFileUri(file); String name = (new Path(decodedPath[0])).getName(); Path newPath = FileUtils.getTransformedPath(name, decodedPath[3], root); insertData.addToFilesAdded(newPath.toUri().toString()); insertData.addToSubDirectoryList(decodedPath[3]); try { insertData.addToFilesAddedChecksum(ReplChangeManager.checksumFor(newPath, newPath.getFileSystem(conf))); } catch (IOException e) { LOG.error("failed to get checksum for the file " + newPath + " with error: " + e.getMessage()); throw new TException(e.getMessage()); } } WriteNotificationLogRequest wnRqst = new WriteNotificationLogRequest(targetTxnId, writeEventInfo.getWriteId(), writeEventInfo.getDatabase(), writeEventInfo.getTable(), insertData); if (partitionValue != null) { wnRqst.setPartitionVals(partitionValue); } addTxnWriteNotificationLog(tbl, ptnObj, wnRqst); } } getTxnHandler().commitTxn(rqst); if (listeners != null && !listeners.isEmpty() && !isHiveReplTxn) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.COMMIT_TXN, new CommitTxnEvent(rqst.getTxnid(), this)); Optional<CompactionInfo> compactionInfo = getTxnHandler().getCompactionByTxnId(rqst.getTxnid()); if (compactionInfo.isPresent()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.COMMIT_COMPACTION, new CommitCompactionEvent(rqst.getTxnid(), compactionInfo.get(), this)); } } } @Override public void repl_tbl_writeid_state(ReplTblWriteIdStateRequest rqst) throws TException { getTxnHandler().replTableWriteIdState(rqst); } @Override public GetValidWriteIdsResponse get_valid_write_ids(GetValidWriteIdsRequest rqst) throws TException { return getTxnHandler().getValidWriteIds(rqst); } @Override public void set_hadoop_jobid(String jobId, long cqId) { getTxnHandler().setHadoopJobId(jobId, cqId); } @Deprecated @Override public OptionalCompactionInfoStruct find_next_compact(String workerId) throws MetaException{ return CompactionInfo.compactionInfoToOptionalStruct( getTxnHandler().findNextToCompact(workerId)); } @Override public OptionalCompactionInfoStruct find_next_compact2(FindNextCompactRequest rqst) throws MetaException{ return CompactionInfo.compactionInfoToOptionalStruct( getTxnHandler().findNextToCompact(rqst)); } @Override public void mark_cleaned(CompactionInfoStruct cr) throws MetaException 
{ getTxnHandler().markCleaned(CompactionInfo.compactionStructToInfo(cr)); } @Override public void mark_compacted(CompactionInfoStruct cr) throws MetaException { getTxnHandler().markCompacted(CompactionInfo.compactionStructToInfo(cr)); } @Override public void mark_failed(CompactionInfoStruct cr) throws MetaException { getTxnHandler().markFailed(CompactionInfo.compactionStructToInfo(cr)); } @Override public List<String> find_columns_with_stats(CompactionInfoStruct cr) throws MetaException { return getTxnHandler().findColumnsWithStats(CompactionInfo.compactionStructToInfo(cr)); } @Override public void update_compactor_state(CompactionInfoStruct cr, long highWaterMark) throws MetaException { getTxnHandler().updateCompactorState( CompactionInfo.compactionStructToInfo(cr), highWaterMark); } @Override public GetLatestCommittedCompactionInfoResponse get_latest_committed_compaction_info( GetLatestCommittedCompactionInfoRequest rqst) throws MetaException { if (rqst.getDbname() == null || rqst.getTablename() == null) { throw new MetaException("Database name and table name cannot be null."); } GetLatestCommittedCompactionInfoResponse response = getTxnHandler().getLatestCommittedCompactionInfo(rqst); return FilterUtils.filterCommittedCompactionInfoStructIfEnabled(isServerFilterEnabled, filterHook, getDefaultCatalog(conf), rqst.getDbname(), rqst.getTablename(), response); } @Override public AllocateTableWriteIdsResponse allocate_table_write_ids( AllocateTableWriteIdsRequest rqst) throws TException { AllocateTableWriteIdsResponse response = getTxnHandler().allocateTableWriteIds(rqst); if (listeners != null && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALLOC_WRITE_ID, new AllocWriteIdEvent(response.getTxnToWriteIds(), rqst.getDbName(), rqst.getTableName(), this)); } return response; } @Override public MaxAllocatedTableWriteIdResponse get_max_allocated_table_write_id(MaxAllocatedTableWriteIdRequest rqst) throws MetaException { return getTxnHandler().getMaxAllocatedTableWrited(rqst); } @Override public void seed_write_id(SeedTableWriteIdsRequest rqst) throws MetaException { getTxnHandler().seedWriteId(rqst); } @Override public void seed_txn_id(SeedTxnIdRequest rqst) throws MetaException { getTxnHandler().seedTxnId(rqst); } private void addTxnWriteNotificationLog(Table tableObj, Partition ptnObj, WriteNotificationLogRequest rqst) throws MetaException { String partition = ""; //Empty string is an invalid partition name. Can be used for non partitioned table. 
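// Illustrative note (not from the original source): for a table partitioned by (ds string, hr string),
// Warehouse.makePartName(partitionKeys, Arrays.asList("2024-01-01", "00")) produces the canonical
// name "ds=2024-01-01/hr=00", which is the form recorded in the write notification log.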
if (ptnObj != null) { partition = Warehouse.makePartName(tableObj.getPartitionKeys(), rqst.getPartitionVals()); } AcidWriteEvent event = new AcidWriteEvent(partition, tableObj, ptnObj, rqst); getTxnHandler().addWriteNotificationLog(event); if (listeners != null && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ACID_WRITE, event); } } private Table getTblObject(String db, String table, String catalog) throws MetaException, NoSuchObjectException { GetTableRequest req = new GetTableRequest(db, table); if (catalog != null) { req.setCatName(catalog); } req.setCapabilities(new ClientCapabilities(Lists.newArrayList(ClientCapability.TEST_CAPABILITY, ClientCapability.INSERT_ONLY_TABLES))); return get_table_req(req).getTable(); } private Partition getPartitionObj(String db, String table, List<String> partitionVals, Table tableObj) throws MetaException, NoSuchObjectException { if (tableObj.isSetPartitionKeys() && !tableObj.getPartitionKeys().isEmpty()) { return get_partition(db, table, partitionVals); } return null; } @Override public WriteNotificationLogResponse add_write_notification_log(WriteNotificationLogRequest rqst) throws TException { Table tableObj = getTblObject(rqst.getDb(), rqst.getTable(), null); Partition ptnObj = getPartitionObj(rqst.getDb(), rqst.getTable(), rqst.getPartitionVals(), tableObj); addTxnWriteNotificationLog(tableObj, ptnObj, rqst); return new WriteNotificationLogResponse(); } @Override public WriteNotificationLogBatchResponse add_write_notification_log_in_batch( WriteNotificationLogBatchRequest batchRequest) throws TException { if (batchRequest.getRequestList().size() == 0) { return new WriteNotificationLogBatchResponse(); } Table tableObj = getTblObject(batchRequest.getDb(), batchRequest.getTable(), batchRequest.getCatalog()); BatchAcidWriteEvent event = new BatchAcidWriteEvent(); List<String> partNameList = new ArrayList<>(); List<Partition> ptnObjList; Map<String, WriteNotificationLogRequest> rqstMap = new HashMap<>(); if (tableObj.getPartitionKeys().size() != 0) { // partitioned table for (WriteNotificationLogRequest rqst : batchRequest.getRequestList()) { String partition = Warehouse.makePartName(tableObj.getPartitionKeys(), rqst.getPartitionVals()); partNameList.add(partition); // This is used to ignore those requests for which the partition does not exist. rqstMap.put(partition, rqst); } ptnObjList = getMS().getPartitionsByNames(tableObj.getCatName(), tableObj.getDbName(), tableObj.getTableName(), partNameList); } else { ptnObjList = new ArrayList<>(); for (WriteNotificationLogRequest ignored : batchRequest.getRequestList()) { ptnObjList.add(null); } } int idx = 0; for (Partition partObject : ptnObjList) { String partition = ""; //Empty string is an invalid partition name. Can be used for non partitioned table. WriteNotificationLogRequest request; if (partObject != null) { partition = Warehouse.makePartName(tableObj.getPartitionKeys(), partObject.getValues()); request = rqstMap.get(partition); } else { // for a non-partitioned table, we can get the requests serially from the list.
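// For partitioned tables the requests were keyed by partition name in rqstMap above (partitions
// that no longer exist are simply skipped); only the non-partitioned case relies on positional
// order, which is why idx advances solely in this branch.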
request = batchRequest.getRequestList().get(idx++); } event.addNotification(partition, tableObj, partObject, request); if (listeners != null && !listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.BATCH_ACID_WRITE, new BatchAcidWriteEvent(partition, tableObj, partObject, request)); } } getTxnHandler().addWriteNotificationLog(event); return new WriteNotificationLogBatchResponse(); } @Override public LockResponse lock(LockRequest rqst) throws TException { return getTxnHandler().lock(rqst); } @Override public LockResponse check_lock(CheckLockRequest rqst) throws TException { return getTxnHandler().checkLock(rqst); } @Override public void unlock(UnlockRequest rqst) throws TException { getTxnHandler().unlock(rqst); } @Override public ShowLocksResponse show_locks(ShowLocksRequest rqst) throws TException { return getTxnHandler().showLocks(rqst); } @Override public void heartbeat(HeartbeatRequest ids) throws TException { getTxnHandler().heartbeat(ids); } @Override public HeartbeatTxnRangeResponse heartbeat_txn_range(HeartbeatTxnRangeRequest rqst) throws TException { return getTxnHandler().heartbeatTxnRange(rqst); } @Deprecated @Override public void compact(CompactionRequest rqst) throws TException { compact2(rqst); } @Override public CompactionResponse compact2(CompactionRequest rqst) throws TException { return getTxnHandler().compact(rqst); } @Override public ShowCompactResponse show_compact(ShowCompactRequest rqst) throws TException { ShowCompactResponse response = getTxnHandler().showCompact(rqst); response.setCompacts(FilterUtils.filterCompactionsIfEnabled(isServerFilterEnabled, filterHook, getDefaultCatalog(conf), response.getCompacts())); return response; } @Override public void flushCache() throws TException { getMS().flushCache(); } @Override public void add_dynamic_partitions(AddDynamicPartitions rqst) throws TException { getTxnHandler().addDynamicPartitions(rqst); } @Override public GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request) throws TException { incrementCounter("get_principals_in_role"); firePreEvent(new PreAuthorizationCallEvent(this)); Exception ex = null; GetPrincipalsInRoleResponse response = null; try { response = new GetPrincipalsInRoleResponse(getMS().listRoleMembers(request.getRoleName())); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_principals_in_role", ex == null, ex); } return response; } @Override public GetRoleGrantsForPrincipalResponse get_role_grants_for_principal( GetRoleGrantsForPrincipalRequest request) throws TException { incrementCounter("get_role_grants_for_principal"); firePreEvent(new PreAuthorizationCallEvent(this)); Exception ex = null; List<RolePrincipalGrant> roleMaps = null; try { roleMaps = getMS().listRolesWithGrants(request.getPrincipal_name(), request.getPrincipal_type()); } catch (Exception e) { ex = e; rethrowException(e); } finally { endFunction("get_role_grants_for_principal", ex == null, ex); } //List<RolePrincipalGrant> roleGrantsList = getRolePrincipalGrants(roleMaps); return new GetRoleGrantsForPrincipalResponse(roleMaps); } @Override public AggrStats get_aggr_stats_for(PartitionsStatsRequest request) throws TException { String catName = request.isSetCatName() ? 
request.getCatName().toLowerCase() : getDefaultCatalog(conf); String dbName = request.getDbName().toLowerCase(); String tblName = request.getTblName().toLowerCase(); startFunction("get_aggr_stats_for", ": table=" + TableName.getQualified(catName, dbName, tblName)); List<String> lowerCaseColNames = new ArrayList<>(request.getColNames().size()); for (String colName : request.getColNames()) { lowerCaseColNames.add(colName.toLowerCase()); } List<String> lowerCasePartNames = new ArrayList<>(request.getPartNames().size()); for (String partName : request.getPartNames()) { lowerCasePartNames.add(lowerCaseConvertPartName(partName)); } AggrStats aggrStats = null; try { aggrStats = getMS().get_aggr_stats_for(catName, dbName, tblName, lowerCasePartNames, lowerCaseColNames, request.getEngine(), request.getValidWriteIdList()); return aggrStats; } finally { endFunction("get_aggr_stats_for", aggrStats == null, null, request.getTblName()); } } @Override public boolean set_aggr_stats_for(SetPartitionsStatsRequest request) throws TException { boolean ret = true; List<ColumnStatistics> csNews = request.getColStats(); if (csNews == null || csNews.isEmpty()) { return ret; } // figure out if it is table level or partition level ColumnStatistics firstColStats = csNews.get(0); ColumnStatisticsDesc statsDesc = firstColStats.getStatsDesc(); String catName = statsDesc.isSetCatName() ? statsDesc.getCatName() : getDefaultCatalog(conf); String dbName = statsDesc.getDbName(); String tableName = statsDesc.getTableName(); List<String> colNames = new ArrayList<>(); for (ColumnStatisticsObj obj : firstColStats.getStatsObj()) { colNames.add(obj.getColName()); } if (statsDesc.isIsTblLevel()) { // there should be only one ColumnStatistics if (request.getColStatsSize() != 1) { throw new MetaException( "Expecting only 1 ColumnStatistics for table's column stats, but find " + request.getColStatsSize()); } if (request.isSetNeedMerge() && request.isNeedMerge()) { return updateTableColumnStatsWithMerge(catName, dbName, tableName, colNames, request); } else { // This is the overwrite case, we do not care about the accuracy. return updateTableColumnStatsInternal(firstColStats, request.getValidWriteIdList(), request.getWriteId()); } } else { // partition level column stats merging // note that we may have two or more duplicate partition names. // see autoColumnStats_2.q under TestMiniLlapLocalCliDriver Map<String, ColumnStatistics> newStatsMap = new HashMap<>(); for (ColumnStatistics csNew : csNews) { String partName = csNew.getStatsDesc().getPartName(); if (newStatsMap.containsKey(partName)) { MetaStoreServerUtils.mergeColStats(csNew, newStatsMap.get(partName)); } newStatsMap.put(partName, csNew); } if (request.isSetNeedMerge() && request.isNeedMerge()) { ret = updatePartColumnStatsWithMerge(catName, dbName, tableName, colNames, newStatsMap, request); } else { // No merge. Table t = getTable(catName, dbName, tableName); // We don't short-circuit on errors here anymore. That can leave acid stats invalid. if (MetastoreConf.getBoolVar(getConf(), ConfVars.TRY_DIRECT_SQL)) { ret = updatePartitionColStatsInBatch(t, newStatsMap, request.getValidWriteIdList(), request.getWriteId()); } else { for (Map.Entry<String, ColumnStatistics> entry : newStatsMap.entrySet()) { // We don't short-circuit on errors here anymore. That can leave acid stats invalid. 
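// Accumulating with '&& ret' means a single failed partition marks the whole batch as
// unsuccessful while the remaining partitions are still attempted.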
ret = updatePartitonColStatsInternal(t, entry.getValue(), request.getValidWriteIdList(), request.getWriteId()) && ret; } } } } return ret; } private boolean updatePartColumnStatsWithMerge(String catName, String dbName, String tableName, List<String> colNames, Map<String, ColumnStatistics> newStatsMap, SetPartitionsStatsRequest request) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException { RawStore ms = getMS(); ms.openTransaction(); boolean isCommitted = false, result = false; try { // a single call to get all column stats for all partitions List<String> partitionNames = new ArrayList<>(); partitionNames.addAll(newStatsMap.keySet()); List<ColumnStatistics> csOlds = ms.getPartitionColumnStatistics(catName, dbName, tableName, partitionNames, colNames, request.getEngine(), request.getValidWriteIdList()); if (newStatsMap.values().size() != csOlds.size()) { // some of the partitions miss stats. LOG.debug("Some of the partitions miss stats."); } Map<String, ColumnStatistics> oldStatsMap = new HashMap<>(); for (ColumnStatistics csOld : csOlds) { oldStatsMap.put(csOld.getStatsDesc().getPartName(), csOld); } // another single call to get all the partition objects List<Partition> partitions = ms.getPartitionsByNames(catName, dbName, tableName, partitionNames); Map<String, Partition> mapToPart = new HashMap<>(); for (int index = 0; index < partitionNames.size(); index++) { mapToPart.put(partitionNames.get(index), partitions.get(index)); } Table t = getTable(catName, dbName, tableName); Map<String, ColumnStatistics> statsMap = new HashMap<>(); boolean useDirectSql = MetastoreConf.getBoolVar(getConf(), ConfVars.TRY_DIRECT_SQL); for (Map.Entry<String, ColumnStatistics> entry : newStatsMap.entrySet()) { ColumnStatistics csNew = entry.getValue(); ColumnStatistics csOld = oldStatsMap.get(entry.getKey()); boolean isInvalidTxnStats = csOld != null && csOld.isSetIsStatsCompliant() && !csOld.isIsStatsCompliant(); Partition part = mapToPart.get(entry.getKey()); if (isInvalidTxnStats) { // No columns can be merged; a shortcut for getMergableCols. csNew.setStatsObj(Lists.newArrayList()); } else { // we first use getParameters() to prune the stats MetaStoreServerUtils.getMergableCols(csNew, part.getParameters()); // we merge those that can be merged if (csOld != null && csOld.getStatsObjSize() != 0 && !csNew.getStatsObj().isEmpty()) { MetaStoreServerUtils.mergeColStats(csNew, csOld); } } if (!csNew.getStatsObj().isEmpty()) { // We don't short-circuit on errors here anymore. That can leave acid stats invalid. if (useDirectSql) { statsMap.put(csNew.getStatsDesc().getPartName(), csNew); } else { result = updatePartitonColStatsInternal(t, csNew, request.getValidWriteIdList(), request.getWriteId()) && result; } } else if (isInvalidTxnStats) { // For now because the stats state is such as it is, we will invalidate everything. // Overall the semantics here are not clear - we could invalidate only some columns, but does // that make any physical sense? Could a query affect some columns but not others? part.setWriteId(request.getWriteId()); StatsSetupConst.clearColumnStatsState(part.getParameters()); StatsSetupConst.setBasicStatsState(part.getParameters(), StatsSetupConst.FALSE); ms.alterPartition(catName, dbName, tableName, part.getValues(), part, request.getValidWriteIdList()); result = false; } else { // TODO: why doesn't the original call for non acid tables invalidate the stats?
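// Reaching this branch means the pruned stats object came back empty while the existing stats
// are still compliant: nothing was mergeable for this partition, so the update is skipped
// rather than failed.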
LOG.debug("All the column stats " + csNew.getStatsDesc().getPartName() + " are not accurate to merge."); } } ms.commitTransaction(); isCommitted = true; // updatePartitionColStatsInBatch starts/commit transaction internally. As there is no write or select for update // operations is done in this transaction, it is safe to commit it before calling updatePartitionColStatsInBatch. if (!statsMap.isEmpty()) { updatePartitionColStatsInBatch(t, statsMap, request.getValidWriteIdList(), request.getWriteId()); } } finally { if (!isCommitted) { ms.rollbackTransaction(); } } return result; } private boolean updateTableColumnStatsWithMerge(String catName, String dbName, String tableName, List<String> colNames, SetPartitionsStatsRequest request) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException { ColumnStatistics firstColStats = request.getColStats().get(0); RawStore ms = getMS(); ms.openTransaction(); boolean isCommitted = false, result = false; try { ColumnStatistics csOld = ms.getTableColumnStatistics(catName, dbName, tableName, colNames, request.getEngine(), request.getValidWriteIdList()); // we first use the valid stats list to prune the stats boolean isInvalidTxnStats = csOld != null && csOld.isSetIsStatsCompliant() && !csOld.isIsStatsCompliant(); if (isInvalidTxnStats) { // No columns can be merged; a shortcut for getMergableCols. firstColStats.setStatsObj(Lists.newArrayList()); } else { Table t = getTable(catName, dbName, tableName); MetaStoreServerUtils.getMergableCols(firstColStats, t.getParameters()); // we merge those that can be merged if (csOld != null && csOld.getStatsObjSize() != 0 && !firstColStats.getStatsObj().isEmpty()) { MetaStoreServerUtils.mergeColStats(firstColStats, csOld); } } if (!firstColStats.getStatsObj().isEmpty()) { result = updateTableColumnStatsInternal(firstColStats, request.getValidWriteIdList(), request.getWriteId()); } else if (isInvalidTxnStats) { // For now because the stats state is such as it is, we will invalidate everything. // Overall the sematics here are not clear - we could invalide only some columns, but does // that make any physical sense? Could query affect some columns but not others? Table t = getTable(catName, dbName, tableName); t.setWriteId(request.getWriteId()); StatsSetupConst.clearColumnStatsState(t.getParameters()); StatsSetupConst.setBasicStatsState(t.getParameters(), StatsSetupConst.FALSE); ms.alterTable(catName, dbName, tableName, t, request.getValidWriteIdList()); } else { // TODO: why doesn't the original call for non acid tables invalidate the stats? 
LOG.debug("All the column stats are not accurate to merge."); result = true; } ms.commitTransaction(); isCommitted = true; } finally { if (!isCommitted) { ms.rollbackTransaction(); } } return result; } private Table getTable(String catName, String dbName, String tableName) throws MetaException, InvalidObjectException { return getTable(catName, dbName, tableName, null); } private Table getTable(String catName, String dbName, String tableName, String writeIdList) throws MetaException, InvalidObjectException { Table t = getMS().getTable(catName, dbName, tableName, writeIdList); if (t == null) { throw new InvalidObjectException(TableName.getQualified(catName, dbName, tableName) + " table not found"); } return t; } @Override public NotificationEventResponse get_next_notification(NotificationEventRequest rqst) throws TException { authorizeProxyPrivilege(); RawStore ms = getMS(); return ms.getNextNotification(rqst); } @Override public CurrentNotificationEventId get_current_notificationEventId() throws TException { authorizeProxyPrivilege(); RawStore ms = getMS(); return ms.getCurrentNotificationEventId(); } @Override public NotificationEventsCountResponse get_notification_events_count(NotificationEventsCountRequest rqst) throws TException { authorizeProxyPrivilege(); RawStore ms = getMS(); return ms.getNotificationEventsCount(rqst); } private void authorizeProxyPrivilege() throws TException { // Skip the auth in embedded mode or if the auth is disabled if (!HiveMetaStore.isMetaStoreRemote() || !MetastoreConf.getBoolVar(conf, ConfVars.EVENT_DB_NOTIFICATION_API_AUTH)) { return; } String user = null; try { user = SecurityUtils.getUGI().getShortUserName(); } catch (Exception ex) { LOG.error("Cannot obtain username", ex); throw new TException(ex); } if (!MetaStoreServerUtils.checkUserHasHostProxyPrivileges(user, conf, getIPAddress())) { LOG.error("Not authorized to make the get_notification_events_count call. You can try to disable " + ConfVars.EVENT_DB_NOTIFICATION_API_AUTH.toString()); throw new TException("User " + user + " is not allowed to perform this API call"); } } @Override public FireEventResponse fire_listener_event(FireEventRequest rqst) throws TException { switch (rqst.getData().getSetField()) { case INSERT_DATA: case INSERT_DATAS: String catName = rqst.isSetCatName() ? rqst.getCatName() : getDefaultCatalog(conf); String dbName = rqst.getDbName(); String tblName = rqst.getTableName(); boolean isSuccessful = rqst.isSuccessful(); List<InsertEvent> events = new ArrayList<>(); if (rqst.getData().isSetInsertData()) { events.add(new InsertEvent(catName, dbName, tblName, rqst.getPartitionVals(), rqst.getData().getInsertData(), isSuccessful, this)); } else { // this is a bulk fire insert event operation // we use the partition values field from the InsertEventRequestData object // instead of the FireEventRequest object for (InsertEventRequestData insertData : rqst.getData().getInsertDatas()) { if (!insertData.isSetPartitionVal()) { throw new MetaException( "Partition values must be set when firing multiple insert events"); } events.add(new InsertEvent(catName, dbName, tblName, insertData.getPartitionVal(), insertData, isSuccessful, this)); } } FireEventResponse response = new FireEventResponse(); for (InsertEvent event : events) { /* * The transactional listener response will be set already on the event, so there is not need * to pass the response to the non-transactional listener. 
*/ MetaStoreListenerNotifier .notifyEvent(transactionalListeners, EventType.INSERT, event); MetaStoreListenerNotifier.notifyEvent(listeners, EventType.INSERT, event); if (event.getParameters() != null && event.getParameters() .containsKey( MetaStoreEventListenerConstants.DB_NOTIFICATION_EVENT_ID_KEY_NAME)) { response.addToEventIds(Long.valueOf(event.getParameters() .get(MetaStoreEventListenerConstants.DB_NOTIFICATION_EVENT_ID_KEY_NAME))); } else { String msg = "Insert event id not generated for "; if (event.getPartitionObj() != null) { msg += "partition " + Arrays .toString(event.getPartitionObj().getValues().toArray()) + " of "; } msg += "table " + event.getTableObj().getDbName() + "." + event.getTableObj() .getTableName(); LOG.warn(msg); } } return response; default: throw new TException("Event type " + rqst.getData().getSetField().toString() + " not currently supported."); } } @Override public GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest req) throws TException { GetFileMetadataByExprResult result = new GetFileMetadataByExprResult(); RawStore ms = getMS(); if (!ms.isFileMetadataSupported()) { result.setIsSupported(false); result.setMetadata(Collections.emptyMap()); // Set the required field. return result; } result.setIsSupported(true); List<Long> fileIds = req.getFileIds(); boolean needMetadata = !req.isSetDoGetFooters() || req.isDoGetFooters(); FileMetadataExprType type = req.isSetType() ? req.getType() : FileMetadataExprType.ORC_SARG; ByteBuffer[] metadatas = needMetadata ? new ByteBuffer[fileIds.size()] : null; ByteBuffer[] ppdResults = new ByteBuffer[fileIds.size()]; boolean[] eliminated = new boolean[fileIds.size()]; getMS().getFileMetadataByExpr(fileIds, type, req.getExpr(), metadatas, ppdResults, eliminated); for (int i = 0; i < fileIds.size(); ++i) { if (!eliminated[i] && ppdResults[i] == null) { continue; // No metadata => no ppd. } MetadataPpdResult mpr = new MetadataPpdResult(); ByteBuffer ppdResult = eliminated[i] ? null : handleReadOnlyBufferForThrift(ppdResults[i]); mpr.setIncludeBitset(ppdResult); if (needMetadata) { ByteBuffer metadata = eliminated[i] ? null : handleReadOnlyBufferForThrift(metadatas[i]); mpr.setMetadata(metadata); } result.putToMetadata(fileIds.get(i), mpr); } if (!result.isSetMetadata()) { result.setMetadata(Collections.emptyMap()); // Set the required field. } return result; } @Override public GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req) throws TException { GetFileMetadataResult result = new GetFileMetadataResult(); RawStore ms = getMS(); if (!ms.isFileMetadataSupported()) { result.setIsSupported(false); result.setMetadata(Collections.emptyMap()); // Set the required field. return result; } result.setIsSupported(true); List<Long> fileIds = req.getFileIds(); ByteBuffer[] metadatas = ms.getFileMetadata(fileIds); assert metadatas.length == fileIds.size(); for (int i = 0; i < metadatas.length; ++i) { ByteBuffer bb = metadatas[i]; if (bb == null) { continue; } bb = handleReadOnlyBufferForThrift(bb); result.putToMetadata(fileIds.get(i), bb); } if (!result.isSetMetadata()) { result.setMetadata(Collections.emptyMap()); // Set the required field. } return result; } private ByteBuffer handleReadOnlyBufferForThrift(ByteBuffer bb) { if (!bb.isReadOnly()) { return bb; } // Thrift cannot write read-only buffers... oh well. // TODO: actually thrift never writes to the buffer, so we could use reflection to // unset the unnecessary read-only flag if allocation/copy perf becomes a problem.
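// ByteBuffer.array() throws ReadOnlyBufferException on read-only buffers, which is presumably what
// trips Thrift here; the allocate/put/flip sequence below yields an equivalent writable heap copy.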
ByteBuffer copy = ByteBuffer.allocate(bb.capacity()); copy.put(bb); copy.flip(); return copy; } @Override public PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req) throws TException { RawStore ms = getMS(); if (ms.isFileMetadataSupported()) { ms.putFileMetadata(req.getFileIds(), req.getMetadata(), req.getType()); } return new PutFileMetadataResult(); } @Override public ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req) throws TException { getMS().putFileMetadata(req.getFileIds(), null, null); return new ClearFileMetadataResult(); } @Override public CacheFileMetadataResult cache_file_metadata( CacheFileMetadataRequest req) throws TException { RawStore ms = getMS(); if (!ms.isFileMetadataSupported()) { return new CacheFileMetadataResult(false); } String dbName = req.getDbName(), tblName = req.getTblName(), partName = req.isSetPartName() ? req.getPartName() : null; boolean isAllPart = req.isSetIsAllParts() && req.isIsAllParts(); ms.openTransaction(); boolean success = false; try { Table tbl = ms.getTable(DEFAULT_CATALOG_NAME, dbName, tblName); if (tbl == null) { throw new NoSuchObjectException(dbName + "." + tblName + " not found"); } boolean isPartitioned = tbl.isSetPartitionKeys() && tbl.getPartitionKeysSize() > 0; String tableInputFormat = tbl.isSetSd() ? tbl.getSd().getInputFormat() : null; if (!isPartitioned) { if (partName != null || isAllPart) { throw new MetaException("Table is not partitioned"); } if (!tbl.isSetSd() || !tbl.getSd().isSetLocation()) { throw new MetaException( "Table does not have storage location; this operation is not supported on views"); } FileMetadataExprType type = expressionProxy.getMetadataType(tableInputFormat); if (type == null) { throw new MetaException("The operation is not supported for " + tableInputFormat); } fileMetadataManager.queueCacheMetadata(tbl.getSd().getLocation(), type); success = true; } else { List<String> partNames; if (partName != null) { partNames = Lists.newArrayList(partName); } else if (isAllPart) { partNames = ms.listPartitionNames(DEFAULT_CATALOG_NAME, dbName, tblName, (short)-1); } else { throw new MetaException("Table is partitioned"); } int batchSize = MetastoreConf.getIntVar( conf, ConfVars.BATCH_RETRIEVE_OBJECTS_MAX); int index = 0; int successCount = 0, failCount = 0; HashSet<String> failFormats = null; while (index < partNames.size()) { int currentBatchSize = Math.min(batchSize, partNames.size() - index); List<String> nameBatch = partNames.subList(index, index + currentBatchSize); index += currentBatchSize; List<Partition> parts = ms.getPartitionsByNames(DEFAULT_CATALOG_NAME, dbName, tblName, nameBatch); for (Partition part : parts) { if (!part.isSetSd() || !part.getSd().isSetLocation()) { throw new MetaException("Partition does not have storage location;" + " this operation is not supported on views"); } String inputFormat = part.getSd().isSetInputFormat() ? 
part.getSd().getInputFormat() : tableInputFormat; FileMetadataExprType type = expressionProxy.getMetadataType(inputFormat); if (type == null) { ++failCount; if (failFormats == null) { failFormats = new HashSet<>(); } failFormats.add(inputFormat); } else { ++successCount; fileMetadataManager.queueCacheMetadata(part.getSd().getLocation(), type); } } } success = true; // Regardless of the following exception if (failCount > 0) { String errorMsg = "The operation failed for " + failCount + " partitions and " + "succeeded for " + successCount + " partitions; unsupported formats: "; boolean isFirst = true; for (String s : failFormats) { if (!isFirst) { errorMsg += ", "; } isFirst = false; errorMsg += s; } throw new MetaException(errorMsg); } } } finally { if (success) { if (!ms.commitTransaction()) { throw new MetaException("Failed to commit"); } } else { ms.rollbackTransaction(); } } return new CacheFileMetadataResult(true); } @VisibleForTesting void updateMetrics() throws MetaException { if (databaseCount != null) { tableCount.set(getMS().getTableCount()); partCount.set(getMS().getPartitionCount()); databaseCount.set(getMS().getDatabaseCount()); } } @Override public PrimaryKeysResponse get_primary_keys(PrimaryKeysRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_primary_keys", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLPrimaryKey> ret = null; Exception ex = null; try { ret = getMS().getPrimaryKeys(request); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_primary_keys", ret != null, ex, request.getTbl_name()); } return new PrimaryKeysResponse(ret); } @Override public ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startFunction("get_foreign_keys", " : parentdb=" + request.getParent_db_name() + " parenttbl=" + request.getParent_tbl_name() + " foreigndb=" + request.getForeign_db_name() + " foreigntbl=" + request.getForeign_tbl_name()); List<SQLForeignKey> ret = null; Exception ex = null; try { ret = getMS().getForeignKeys(request); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_foreign_keys", ret != null, ex, request.getForeign_tbl_name()); } return new ForeignKeysResponse(ret); } @Override public UniqueConstraintsResponse get_unique_constraints(UniqueConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_unique_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLUniqueConstraint> ret = null; Exception ex = null; try { ret = getMS().getUniqueConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_unique_constraints", ret != null, ex, request.getTbl_name()); } return new UniqueConstraintsResponse(ret); } @Override public NotNullConstraintsResponse get_not_null_constraints(NotNullConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? 
request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_not_null_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLNotNullConstraint> ret = null; Exception ex = null; try { ret = getMS().getNotNullConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_not_null_constraints", ret != null, ex, request.getTbl_name()); } return new NotNullConstraintsResponse(ret); } @Override public DefaultConstraintsResponse get_default_constraints(DefaultConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_default_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLDefaultConstraint> ret = null; Exception ex = null; try { ret = getMS().getDefaultConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_default_constraints", ret != null, ex, request.getTbl_name()); } return new DefaultConstraintsResponse(ret); } @Override public CheckConstraintsResponse get_check_constraints(CheckConstraintsRequest request) throws TException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_check_constraints", request.getCatName(), request.getDb_name(), request.getTbl_name()); List<SQLCheckConstraint> ret = null; Exception ex = null; try { ret = getMS().getCheckConstraints(request); } catch (Exception e) { ex = e; throw newMetaException(e); } finally { endFunction("get_check_constraints", ret != null, ex, request.getTbl_name()); } return new CheckConstraintsResponse(ret); } /** * API to fetch all table constraints at once. * @param request consists of the catalog name, database name and table name identifying the table in the metastore * @return all constraints attached to the given table * @throws TException */ @Override public AllTableConstraintsResponse get_all_table_constraints(AllTableConstraintsRequest request) throws TException, MetaException, NoSuchObjectException { request.setCatName(request.isSetCatName() ? request.getCatName() : getDefaultCatalog(conf)); startTableFunction("get_all_table_constraints", request.getCatName(), request.getDbName(), request.getTblName()); SQLAllTableConstraints ret = null; Exception ex = null; try { ret = getMS().getAllTableConstraints(request); } catch (Exception e) { ex = e; throwMetaException(e); } finally { endFunction("get_all_table_constraints", ret != null, ex, request.getTblName()); } return new AllTableConstraintsResponse(ret); } @Override public String get_metastore_db_uuid() throws TException { try { return getMS().getMetastoreDbUuid(); } catch (MetaException e) { LOG.error("Exception thrown while querying metastore db uuid", e); throw e; } } @Override public WMCreateResourcePlanResponse create_resource_plan(WMCreateResourcePlanRequest request) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { int defaultPoolSize = MetastoreConf.getIntVar( conf, MetastoreConf.ConfVars.WM_DEFAULT_POOL_SIZE); WMResourcePlan plan = request.getResourcePlan(); if (defaultPoolSize > 0 && plan.isSetQueryParallelism()) { // If the default pool is not disabled, override the size with the specified parallelism.
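// That is, a queryParallelism set on the plan itself takes precedence over the cluster-wide
// ConfVars.WM_DEFAULT_POOL_SIZE value read above.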
defaultPoolSize = plan.getQueryParallelism(); } try { getMS().createResourcePlan(plan, request.getCopyFrom(), defaultPoolSize); return new WMCreateResourcePlanResponse(); } catch (MetaException e) { LOG.error("Exception while trying to persist resource plan", e); throw e; } } @Override public WMGetResourcePlanResponse get_resource_plan(WMGetResourcePlanRequest request) throws NoSuchObjectException, MetaException, TException { try { WMFullResourcePlan rp = getMS().getResourcePlan(request.getResourcePlanName(), request.getNs()); WMGetResourcePlanResponse resp = new WMGetResourcePlanResponse(); resp.setResourcePlan(rp); return resp; } catch (MetaException e) { LOG.error("Exception while trying to retrieve resource plan", e); throw e; } } @Override public WMGetAllResourcePlanResponse get_all_resource_plans(WMGetAllResourcePlanRequest request) throws MetaException, TException { try { WMGetAllResourcePlanResponse resp = new WMGetAllResourcePlanResponse(); resp.setResourcePlans(getMS().getAllResourcePlans(request.getNs())); return resp; } catch (MetaException e) { LOG.error("Exception while trying to retrieve resource plans", e); throw e; } } @Override public WMAlterResourcePlanResponse alter_resource_plan(WMAlterResourcePlanRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { if (((request.isIsEnableAndActivate() ? 1 : 0) + (request.isIsReplace() ? 1 : 0) + (request.isIsForceDeactivate() ? 1 : 0)) > 1) { throw new MetaException("Invalid request; multiple flags are set"); } WMAlterResourcePlanResponse response = new WMAlterResourcePlanResponse(); // This method will only return full resource plan when activating one, // to give the caller the result atomically with the activation. WMFullResourcePlan fullPlanAfterAlter = getMS().alterResourcePlan( request.getResourcePlanName(), request.getNs(), request.getResourcePlan(), request.isIsEnableAndActivate(), request.isIsForceDeactivate(), request.isIsReplace()); if (fullPlanAfterAlter != null) { response.setFullResourcePlan(fullPlanAfterAlter); } return response; } catch (MetaException e) { LOG.error("Exception while trying to alter resource plan", e); throw e; } } @Override public WMGetActiveResourcePlanResponse get_active_resource_plan( WMGetActiveResourcePlanRequest request) throws MetaException, TException { try { WMGetActiveResourcePlanResponse response = new WMGetActiveResourcePlanResponse(); response.setResourcePlan(getMS().getActiveResourcePlan(request.getNs())); return response; } catch (MetaException e) { LOG.error("Exception while trying to get active resource plan", e); throw e; } } @Override public WMValidateResourcePlanResponse validate_resource_plan(WMValidateResourcePlanRequest request) throws NoSuchObjectException, MetaException, TException { try { return getMS().validateResourcePlan(request.getResourcePlanName(), request.getNs()); } catch (MetaException e) { LOG.error("Exception while trying to validate resource plan", e); throw e; } } @Override public WMDropResourcePlanResponse drop_resource_plan(WMDropResourcePlanRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropResourcePlan(request.getResourcePlanName(), request.getNs()); return new WMDropResourcePlanResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop resource plan", e); throw e; } } @Override public WMCreateTriggerResponse create_wm_trigger(WMCreateTriggerRequest request) throws AlreadyExistsException, InvalidObjectException, 
MetaException, TException { try { getMS().createWMTrigger(request.getTrigger()); return new WMCreateTriggerResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create trigger", e); throw e; } } @Override public WMAlterTriggerResponse alter_wm_trigger(WMAlterTriggerRequest request) throws NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().alterWMTrigger(request.getTrigger()); return new WMAlterTriggerResponse(); } catch (MetaException e) { LOG.error("Exception while trying to alter trigger", e); throw e; } } @Override public WMDropTriggerResponse drop_wm_trigger(WMDropTriggerRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropWMTrigger(request.getResourcePlanName(), request.getTriggerName(), request.getNs()); return new WMDropTriggerResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop trigger.", e); throw e; } } @Override public WMGetTriggersForResourePlanResponse get_triggers_for_resourceplan( WMGetTriggersForResourePlanRequest request) throws NoSuchObjectException, MetaException, TException { try { List<WMTrigger> triggers = getMS().getTriggersForResourcePlan(request.getResourcePlanName(), request.getNs()); WMGetTriggersForResourePlanResponse response = new WMGetTriggersForResourePlanResponse(); response.setTriggers(triggers); return response; } catch (MetaException e) { LOG.error("Exception while trying to retrieve triggers plans", e); throw e; } } @Override public WMAlterPoolResponse alter_wm_pool(WMAlterPoolRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().alterPool(request.getPool(), request.getPoolPath()); return new WMAlterPoolResponse(); } catch (MetaException e) { LOG.error("Exception while trying to alter WMPool", e); throw e; } } @Override public WMCreatePoolResponse create_wm_pool(WMCreatePoolRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().createPool(request.getPool()); return new WMCreatePoolResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create WMPool", e); throw e; } } @Override public WMDropPoolResponse drop_wm_pool(WMDropPoolRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropWMPool(request.getResourcePlanName(), request.getPoolPath(), request.getNs()); return new WMDropPoolResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop WMPool", e); throw e; } } @Override public WMCreateOrUpdateMappingResponse create_or_update_wm_mapping( WMCreateOrUpdateMappingRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { getMS().createOrUpdateWMMapping(request.getMapping(), request.isUpdate()); return new WMCreateOrUpdateMappingResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create or update WMMapping", e); throw e; } } @Override public WMDropMappingResponse drop_wm_mapping(WMDropMappingRequest request) throws NoSuchObjectException, InvalidOperationException, MetaException, TException { try { getMS().dropWMMapping(request.getMapping()); return new WMDropMappingResponse(); } catch (MetaException e) { LOG.error("Exception while trying to drop WMMapping", e); throw e; } } @Override public WMCreateOrDropTriggerToPoolMappingResponse 
create_or_drop_wm_trigger_to_pool_mapping( WMCreateOrDropTriggerToPoolMappingRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException { try { if (request.isDrop()) { getMS().dropWMTriggerToPoolMapping(request.getResourcePlanName(), request.getTriggerName(), request.getPoolPath(), request.getNs()); } else { getMS().createWMTriggerToPoolMapping(request.getResourcePlanName(), request.getTriggerName(), request.getPoolPath(), request.getNs()); } return new WMCreateOrDropTriggerToPoolMappingResponse(); } catch (MetaException e) { LOG.error("Exception while trying to create or drop pool mappings", e); throw e; } } @Override public void create_ischema(ISchema schema) throws TException { startFunction("create_ischema", ": " + schema.getName()); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { firePreEvent(new PreCreateISchemaEvent(this, schema)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.createISchema(schema); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.CREATE_ISCHEMA, new CreateISchemaEvent(true, this, schema)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_ISCHEMA, new CreateISchemaEvent(success, this, schema), null, transactionalListenersResponses, ms); } } } catch (MetaException|AlreadyExistsException e) { LOG.error("Caught exception creating schema", e); ex = e; throw e; } finally { endFunction("create_ischema", success, ex); } } @Override public void alter_ischema(AlterISchemaRequest rqst) throws TException { startFunction("alter_ischema", ": " + rqst); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { ISchema oldSchema = ms.getISchema(rqst.getName()); if (oldSchema == null) { throw new NoSuchObjectException("Could not find schema " + rqst.getName()); } firePreEvent(new PreAlterISchemaEvent(this, oldSchema, rqst.getNewSchema())); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.alterISchema(rqst.getName(), rqst.getNewSchema()); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_ISCHEMA, new AlterISchemaEvent(true, this, oldSchema, rqst.getNewSchema())); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_ISCHEMA, new AlterISchemaEvent(success, this, oldSchema, rqst.getNewSchema()), null, transactionalListenersResponses, ms); } } } catch (MetaException|NoSuchObjectException e) { LOG.error("Caught exception altering schema", e); ex = e; throw e; } finally { endFunction("alter_ischema", success, ex); } } @Override public ISchema get_ischema(ISchemaName schemaName) throws TException { startFunction("get_ischema", ": " + schemaName); Exception ex = null; ISchema schema = null; try { schema = getMS().getISchema(schemaName); if (schema == null) { throw new NoSuchObjectException("No schema named " + schemaName + " exists"); } firePreEvent(new PreReadISchemaEvent(this, schema)); return schema; } catch (MetaException e) { LOG.error("Caught exception getting schema", e); ex = e; throw e; } finally 
{ endFunction("get_ischema", schema != null, ex); } } @Override public void drop_ischema(ISchemaName schemaName) throws TException { startFunction("drop_ischema", ": " + schemaName); Exception ex = null; boolean success = false; RawStore ms = getMS(); try { // look for any valid versions. This will also throw NoSuchObjectException if the schema // itself doesn't exist, which is what we want. SchemaVersion latest = ms.getLatestSchemaVersion(schemaName); if (latest != null) { ex = new InvalidOperationException("Schema " + schemaName + " cannot be dropped, it has" + " at least one valid version"); throw (InvalidObjectException)ex; } ISchema schema = ms.getISchema(schemaName); firePreEvent(new PreDropISchemaEvent(this, schema)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.dropISchema(schemaName); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_ISCHEMA, new DropISchemaEvent(true, this, schema)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_ISCHEMA, new DropISchemaEvent(success, this, schema), null, transactionalListenersResponses, ms); } } } catch (MetaException|NoSuchObjectException e) { LOG.error("Caught exception dropping schema", e); ex = e; throw e; } finally { endFunction("drop_ischema", success, ex); } } @Override public void add_schema_version(SchemaVersion schemaVersion) throws TException { startFunction("add_schema_version", ": " + schemaVersion); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { // Make sure the referenced schema exists if (ms.getISchema(schemaVersion.getSchema()) == null) { throw new NoSuchObjectException("No schema named " + schemaVersion.getSchema()); } firePreEvent(new PreAddSchemaVersionEvent(this, schemaVersion)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.addSchemaVersion(schemaVersion); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ADD_SCHEMA_VERSION, new AddSchemaVersionEvent(true, this, schemaVersion)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_SCHEMA_VERSION, new AddSchemaVersionEvent(success, this, schemaVersion), null, transactionalListenersResponses, ms); } } } catch (MetaException|AlreadyExistsException e) { LOG.error("Caught exception adding schema version", e); ex = e; throw e; } finally { endFunction("add_schema_version", success, ex); } } @Override public SchemaVersion get_schema_version(SchemaVersionDescriptor version) throws TException { startFunction("get_schema_version", ": " + version); Exception ex = null; SchemaVersion schemaVersion = null; try { schemaVersion = getMS().getSchemaVersion(version); if (schemaVersion == null) { throw new NoSuchObjectException("No schema version " + version + "exists"); } firePreEvent(new PreReadhSchemaVersionEvent(this, Collections.singletonList(schemaVersion))); return schemaVersion; } catch (MetaException e) { LOG.error("Caught exception getting schema version", e); ex = e; throw e; } finally { endFunction("get_schema_version", schemaVersion != null, ex); } } @Override public 
SchemaVersion get_schema_latest_version(ISchemaName schemaName) throws TException { startFunction("get_latest_schema_version", ": " + schemaName); Exception ex = null; SchemaVersion schemaVersion = null; try { schemaVersion = getMS().getLatestSchemaVersion(schemaName); if (schemaVersion == null) { throw new NoSuchObjectException("No versions of schema " + schemaName + " exist"); } firePreEvent(new PreReadhSchemaVersionEvent(this, Collections.singletonList(schemaVersion))); return schemaVersion; } catch (MetaException e) { LOG.error("Caught exception getting latest schema version", e); ex = e; throw e; } finally { endFunction("get_latest_schema_version", schemaVersion != null, ex); } } @Override public List<SchemaVersion> get_schema_all_versions(ISchemaName schemaName) throws TException { startFunction("get_all_schema_versions", ": " + schemaName); Exception ex = null; List<SchemaVersion> schemaVersions = null; try { schemaVersions = getMS().getAllSchemaVersion(schemaName); if (schemaVersions == null) { throw new NoSuchObjectException("No versions of schema " + schemaName + " exist"); } firePreEvent(new PreReadhSchemaVersionEvent(this, schemaVersions)); return schemaVersions; } catch (MetaException e) { LOG.error("Caught exception getting all schema versions", e); ex = e; throw e; } finally { endFunction("get_all_schema_versions", schemaVersions != null, ex); } } @Override public void drop_schema_version(SchemaVersionDescriptor version) throws TException { startFunction("drop_schema_version", ": " + version); Exception ex = null; boolean success = false; RawStore ms = getMS(); try { SchemaVersion schemaVersion = ms.getSchemaVersion(version); if (schemaVersion == null) { throw new NoSuchObjectException("No schema version " + version); } firePreEvent(new PreDropSchemaVersionEvent(this, schemaVersion)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.dropSchemaVersion(version); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.DROP_SCHEMA_VERSION, new DropSchemaVersionEvent(true, this, schemaVersion)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_SCHEMA_VERSION, new DropSchemaVersionEvent(success, this, schemaVersion), null, transactionalListenersResponses, ms); } } } catch (MetaException|NoSuchObjectException e) { LOG.error("Caught exception dropping schema version", e); ex = e; throw e; } finally { endFunction("drop_schema_version", success, ex); } } @Override public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst rqst) throws TException { startFunction("get_schemas_by_cols"); Exception ex = null; List<SchemaVersion> schemaVersions = Collections.emptyList(); try { schemaVersions = getMS().getSchemaVersionsByColumns(rqst.getColName(), rqst.getColNamespace(), rqst.getType()); firePreEvent(new PreReadhSchemaVersionEvent(this, schemaVersions)); final List<SchemaVersionDescriptor> entries = new ArrayList<>(schemaVersions.size()); schemaVersions.forEach(schemaVersion -> entries.add( new SchemaVersionDescriptor(schemaVersion.getSchema(), schemaVersion.getVersion()))); return new FindSchemasByColsResp(entries); } catch (MetaException e) { LOG.error("Caught exception doing schema version query", e); ex = e; throw e; } finally { endFunction("get_schemas_by_cols", !schemaVersions.isEmpty(), ex); }
} @Override public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest rqst) throws TException { startFunction("map_schema_version_to_serde", ": " + rqst); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { SchemaVersion oldSchemaVersion = ms.getSchemaVersion(rqst.getSchemaVersion()); if (oldSchemaVersion == null) { throw new NoSuchObjectException("No schema version " + rqst.getSchemaVersion()); } SerDeInfo serde = ms.getSerDeInfo(rqst.getSerdeName()); if (serde == null) { throw new NoSuchObjectException("No SerDe named " + rqst.getSerdeName()); } SchemaVersion newSchemaVersion = new SchemaVersion(oldSchemaVersion); newSchemaVersion.setSerDe(serde); firePreEvent(new PreAlterSchemaVersionEvent(this, oldSchemaVersion, newSchemaVersion)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.alterSchemaVersion(rqst.getSchemaVersion(), newSchemaVersion); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_SCHEMA_VERSION, new AlterSchemaVersionEvent(true, this, oldSchemaVersion, newSchemaVersion)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_SCHEMA_VERSION, new AlterSchemaVersionEvent(success, this, oldSchemaVersion, newSchemaVersion), null, transactionalListenersResponses, ms); } } } catch (MetaException|NoSuchObjectException e) { LOG.error("Caught exception mapping schema version to serde", e); ex = e; throw e; } finally { endFunction("map_schema_version_to_serde", success, ex); } } @Override public void set_schema_version_state(SetSchemaVersionStateRequest rqst) throws TException { startFunction("set_schema_version_state", ": " + rqst); boolean success = false; Exception ex = null; RawStore ms = getMS(); try { SchemaVersion oldSchemaVersion = ms.getSchemaVersion(rqst.getSchemaVersion()); if (oldSchemaVersion == null) { throw new NoSuchObjectException("No schema version " + rqst.getSchemaVersion()); } SchemaVersion newSchemaVersion = new SchemaVersion(oldSchemaVersion); newSchemaVersion.setState(rqst.getState()); firePreEvent(new PreAlterSchemaVersionEvent(this, oldSchemaVersion, newSchemaVersion)); Map<String, String> transactionalListenersResponses = Collections.emptyMap(); ms.openTransaction(); try { ms.alterSchemaVersion(rqst.getSchemaVersion(), newSchemaVersion); if (!transactionalListeners.isEmpty()) { transactionalListenersResponses = MetaStoreListenerNotifier.notifyEvent(transactionalListeners, EventType.ALTER_SCHEMA_VERSION, new AlterSchemaVersionEvent(true, this, oldSchemaVersion, newSchemaVersion)); } success = ms.commitTransaction(); } finally { if (!success) { ms.rollbackTransaction(); } if (!listeners.isEmpty()) { MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_SCHEMA_VERSION, new AlterSchemaVersionEvent(success, this, oldSchemaVersion, newSchemaVersion), null, transactionalListenersResponses, ms); } } } catch (MetaException|NoSuchObjectException e) { LOG.error("Caught exception changing schema version state", e); ex = e; throw e; } finally { endFunction("set_schema_version_state", success, ex); } } @Override public void add_serde(SerDeInfo serde) throws TException { startFunction("create_serde", ": " + serde.getName()); Exception ex = null; boolean success = false; RawStore ms = getMS(); try { ms.openTransaction(); ms.addSerde(serde);
success = ms.commitTransaction(); } catch (MetaException|AlreadyExistsException e) { LOG.error("Caught exception creating serde", e); ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } endFunction("create_serde", success, ex); } } @Override public SerDeInfo get_serde(GetSerdeRequest rqst) throws TException { startFunction("get_serde", ": " + rqst); Exception ex = null; SerDeInfo serde = null; try { serde = getMS().getSerDeInfo(rqst.getSerdeName()); if (serde == null) { throw new NoSuchObjectException("No serde named " + rqst.getSerdeName() + " exists"); } return serde; } catch (MetaException e) { LOG.error("Caught exception getting serde", e); ex = e; throw e; } finally { endFunction("get_serde", serde != null, ex); } } @Override public LockResponse get_lock_materialization_rebuild(String dbName, String tableName, long txnId) throws TException { return getTxnHandler().lockMaterializationRebuild(dbName, tableName, txnId); } @Override public boolean heartbeat_lock_materialization_rebuild(String dbName, String tableName, long txnId) throws TException { return getTxnHandler().heartbeatLockMaterializationRebuild(dbName, tableName, txnId); } @Override public void add_runtime_stats(RuntimeStat stat) throws TException { startFunction("store_runtime_stats"); Exception ex = null; boolean success = false; RawStore ms = getMS(); try { ms.openTransaction(); ms.addRuntimeStat(stat); success = ms.commitTransaction(); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { if (!success) { ms.rollbackTransaction(); } endFunction("store_runtime_stats", success, ex); } } @Override public List<RuntimeStat> get_runtime_stats(GetRuntimeStatsRequest rqst) throws TException { startFunction("get_runtime_stats"); Exception ex = null; try { List<RuntimeStat> res = getMS().getRuntimeStats(rqst.getMaxWeight(), rqst.getMaxCreateTime()); return res; } catch (MetaException e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_runtime_stats", ex == null, ex); } } @Override public ScheduledQueryPollResponse scheduled_query_poll(ScheduledQueryPollRequest request) throws MetaException, TException { startFunction("scheduled_query_poll"); Exception ex = null; try { RawStore ms = getMS(); return ms.scheduledQueryPoll(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("scheduled_query_poll", ex == null, ex); } } @Override public void scheduled_query_maintenance(ScheduledQueryMaintenanceRequest request) throws MetaException, TException { startFunction("scheduled_query_maintenance"); Exception ex = null; try { RawStore ms = getMS(); ms.scheduledQueryMaintenance(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("scheduled_query_maintenance", ex == null, ex); } } @Override public void scheduled_query_progress(ScheduledQueryProgressInfo info) throws MetaException, TException { startFunction("scheduled_query_progress"); Exception ex = null; try { RawStore ms = getMS(); ms.scheduledQueryProgress(info); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("scheduled_query_progress", ex == null, ex); } } @Override public ScheduledQuery get_scheduled_query(ScheduledQueryKey scheduleKey) throws TException { startFunction("get_scheduled_query"); Exception ex = null; try { return getMS().getScheduledQuery(scheduleKey); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally {
endFunction("get_scheduled_query", ex == null, ex); } } @Override public void add_replication_metrics(ReplicationMetricList replicationMetricList) throws MetaException{ startFunction("add_replication_metrics"); Exception ex = null; try { getMS().addReplicationMetrics(replicationMetricList); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("add_replication_metrics", ex == null, ex); } } @Override public ReplicationMetricList get_replication_metrics(GetReplicationMetricsRequest getReplicationMetricsRequest) throws MetaException{ startFunction("get_replication_metrics"); Exception ex = null; try { return getMS().getReplicationMetrics(getReplicationMetricsRequest); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_replication_metrics", ex == null, ex); } } @Override public void create_stored_procedure(StoredProcedure proc) throws NoSuchObjectException, MetaException { startFunction("create_stored_procedure"); Exception ex = null; throwUnsupportedExceptionIfRemoteDB(proc.getDbName(), "create_stored_procedure"); try { getMS().createOrUpdateStoredProcedure(proc); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("create_stored_procedure", ex == null, ex); } } public StoredProcedure get_stored_procedure(StoredProcedureRequest request) throws MetaException, NoSuchObjectException { startFunction("get_stored_procedure"); Exception ex = null; try { StoredProcedure proc = getMS().getStoredProcedure(request.getCatName(), request.getDbName(), request.getProcName()); if (proc == null) { throw new NoSuchObjectException( "HPL/SQL StoredProcedure " + request.getDbName() + "." + request.getProcName() + " does not exist"); } return proc; } catch (Exception e) { if (!(e instanceof NoSuchObjectException)) { LOG.error("Caught exception", e); } ex = e; throw e; } finally { endFunction("get_stored_procedure", ex == null, ex); } } @Override public void drop_stored_procedure(StoredProcedureRequest request) throws MetaException { startFunction("drop_stored_procedure"); Exception ex = null; try { getMS().dropStoredProcedure(request.getCatName(), request.getDbName(), request.getProcName()); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("drop_stored_procedure", ex == null, ex); } } @Override public List<String> get_all_stored_procedures(ListStoredProcedureRequest request) throws MetaException { startFunction("get_all_stored_procedures"); Exception ex = null; try { return getMS().getAllStoredProcedures(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_all_stored_procedures", ex == null, ex); } } public Package find_package(GetPackageRequest request) throws MetaException, NoSuchObjectException { startFunction("find_package"); Exception ex = null; try { Package pkg = getMS().findPackage(request); if (pkg == null) { throw new NoSuchObjectException( "HPL/SQL package " + request.getDbName() + "." 
+ request.getPackageName() + " does not exist"); } return pkg; } catch (Exception e) { if (!(e instanceof NoSuchObjectException)) { LOG.error("Caught exception", e); } ex = e; throw e; } finally { endFunction("find_package", ex == null, ex); } } public void add_package(AddPackageRequest request) throws MetaException, NoSuchObjectException { startFunction("add_package"); Exception ex = null; try { getMS().addPackage(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("add_package", ex == null, ex); } } public List<String> get_all_packages(ListPackageRequest request) throws MetaException { startFunction("get_all_packages"); Exception ex = null; try { return getMS().listPackages(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_all_packages", ex == null, ex); } } public void drop_package(DropPackageRequest request) throws MetaException { startFunction("drop_package"); Exception ex = null; try { getMS().dropPackage(request); } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("drop_package", ex == null, ex); } } @Override public List<WriteEventInfo> get_all_write_event_info(GetAllWriteEventInfoRequest request) throws MetaException { startFunction("get_all_write_event_info"); Exception ex = null; try { List<WriteEventInfo> writeEventInfoList = getMS().getAllWriteEventInfo(request.getTxnId(), request.getDbName(), request.getTableName()); return writeEventInfoList == null ? Collections.emptyList() : writeEventInfoList; } catch (Exception e) { LOG.error("Caught exception", e); ex = e; throw e; } finally { endFunction("get_all_write_event_info", ex == null, ex); } } }
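The HMSHandler methods above all repeat one transaction-plus-listener idiom: open a transaction, mutate the store, notify the transactional listeners while the transaction is still open, commit, and on the way out roll back if needed and tell the plain listeners the real outcome. A minimal, self-contained sketch of that idiom follows; Store and Listener are hypothetical stand-ins for RawStore and the listener lists, not Hive types.

// Minimal sketch of the transaction-plus-listener idiom the handlers above repeat.
public final class TxnPatternSketch {

  /** Stand-in for RawStore, reduced to the calls the idiom needs. */
  interface Store {
    void openTransaction();
    boolean commitTransaction();
    void rollbackTransaction();
    void mutate(); // e.g. createISchema(schema) or dropSchemaVersion(version)
  }

  /** Stand-in for a metastore event listener. */
  interface Listener {
    void onEvent(boolean success);
  }

  static void runWithListeners(Store ms, Listener transactionalListener, Listener plainListener) {
    boolean success = false;
    ms.openTransaction();
    try {
      ms.mutate();
      transactionalListener.onEvent(true); // fired while the transaction is still open
      success = ms.commitTransaction();
    } finally {
      if (!success) {
        ms.rollbackTransaction(); // undo the mutation on any failure path
      }
      plainListener.onEvent(success); // always told the real outcome
    }
  }
}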
HIVE-25633: Prevent shutdown of MetaStore scheduled worker ThreadPool (reviewed by Eugene Chung and Krisztian Kasa) (#2737)
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
HIVE-25633: Prevent shutdown of MetaStore scheduled worker ThreadPool (reviewed by Eugene Chung and Krisztian Kasa) (#2737)
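The commit title above states the goal but the patch body is not included here. One generic way to keep borrowers from shutting down a shared scheduled pool is to hand out a wrapper whose shutdown methods are deliberate no-ops; the following sketch is illustrative only and is not claimed to be the actual HIVE-25633 change.

import java.util.Collections;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

// Hypothetical illustration only; not the HIVE-25633 patch itself.
final class GuardedScheduler {
  private final ScheduledExecutorService delegate = Executors.newSingleThreadScheduledExecutor();

  ScheduledFuture<?> scheduleAtFixedRate(Runnable task, long initialDelay, long period, TimeUnit unit) {
    return delegate.scheduleAtFixedRate(task, initialDelay, period, unit);
  }

  void shutdown() {
    // deliberate no-op: the pool is shared, borrowers must not tear it down
  }

  List<Runnable> shutdownNow() {
    return Collections.emptyList(); // likewise ignored
  }

  void reallyShutdown() {
    delegate.shutdownNow(); // only the owning component calls this at process exit
  }
}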
Java
apache-2.0
f17793dfdff1158d27f2f6c60434ec2c0c2ebeba
0
sequenceiq/cloudbreak,sequenceiq/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak,hortonworks/cloudbreak,sequenceiq/cloudbreak
package com.sequenceiq.cloudbreak.orchestrator.salt.client; import static com.sequenceiq.cloudbreak.orchestrator.salt.client.SaltEndpoint.BOOT_HOSTNAME_ENDPOINT; import static java.util.Collections.singletonMap; import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.ws.rs.client.Client; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Form; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.http.HttpStatus; import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; import org.glassfish.jersey.media.multipart.Boundary; import org.glassfish.jersey.media.multipart.FormDataMultiPart; import org.glassfish.jersey.media.multipart.MultiPart; import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.sequenceiq.cloudbreak.client.RestClientUtil; import com.sequenceiq.cloudbreak.client.PkiUtil; import com.sequenceiq.cloudbreak.orchestrator.exception.CloudbreakOrchestratorFailedException; import com.sequenceiq.cloudbreak.orchestrator.model.GatewayConfig; import com.sequenceiq.cloudbreak.orchestrator.model.GenericResponse; import com.sequenceiq.cloudbreak.orchestrator.model.GenericResponses; import com.sequenceiq.cloudbreak.orchestrator.salt.client.target.Target; import com.sequenceiq.cloudbreak.orchestrator.salt.domain.Pillar; import com.sequenceiq.cloudbreak.orchestrator.salt.domain.SaltAction; import com.sequenceiq.cloudbreak.util.JaxRSUtil; public class SaltConnector implements Closeable { private static final Logger LOGGER = LoggerFactory.getLogger(SaltConnector.class); private static final ObjectMapper MAPPER = new ObjectMapper(); private static final String SALT_USER = "saltuser"; private static final String SALT_PASSWORD = "saltpass"; private static final String SALT_BOOT_USER = "cbadmin"; private static final String SALT_BOOT_PASSWORD = "cbadmin"; private static final String SIGN_HEADER = "signature"; private static final List<Integer> ACCEPTED_STATUSES = Arrays.asList(HttpStatus.SC_OK, HttpStatus.SC_CREATED, HttpStatus.SC_ACCEPTED); private final Client restClient; private final WebTarget saltTarget; private final String saltPassword; private final String signatureKey; public SaltConnector(GatewayConfig gatewayConfig, boolean debug) { try { this.restClient = RestClientUtil.createClient( gatewayConfig.getServerCert(), gatewayConfig.getClientCert(), gatewayConfig.getClientKey(), debug, SaltConnector.class); String saltBootPasswd = Optional.ofNullable(gatewayConfig.getSaltBootPassword()).orElse(SALT_BOOT_PASSWORD); HttpAuthenticationFeature feature = HttpAuthenticationFeature.basic(SALT_BOOT_USER, saltBootPasswd); this.saltTarget = restClient.target(gatewayConfig.getGatewayUrl()).register(feature); this.saltPassword = Optional.ofNullable(gatewayConfig.getSaltPassword()).orElse(SALT_PASSWORD); this.signatureKey = gatewayConfig.getSignatureKey(); } catch (Exception e) { throw new RuntimeException("Failed to create rest client with 2-way-ssl config", e); } } public GenericResponse health() { Response response = 
saltTarget.path(SaltEndpoint.BOOT_HEALTH.getContextPath()).request().get(); GenericResponse responseEntity = JaxRSUtil.response(response, GenericResponse.class); LOGGER.info("Health response: {}", responseEntity); return responseEntity; } public GenericResponses pillar(Set<String> targets, Pillar pillar) { Response distributeResponse = saltTarget.path(SaltEndpoint.BOOT_PILLAR_DISTRIBUTE.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(pillar).getBytes())) .post(Entity.json(pillar)); if (distributeResponse.getStatus() == HttpStatus.SC_NOT_FOUND) { // simple pillar save for CB <= 1.14 distributeResponse.close(); Response singleResponse = saltTarget.path(SaltEndpoint.BOOT_PILLAR_SAVE.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(pillar).getBytes())) .post(Entity.json(pillar)); GenericResponses genericResponses = new GenericResponses(); GenericResponse genericResponse = new GenericResponse(); genericResponse.setAddress(targets.iterator().next()); genericResponse.setStatusCode(singleResponse.getStatus()); genericResponses.setResponses(Collections.singletonList(genericResponse)); singleResponse.close(); return genericResponses; } return JaxRSUtil.response(distributeResponse, GenericResponses.class); } public GenericResponses action(SaltAction saltAction) { Response response = saltTarget.path(SaltEndpoint.BOOT_ACTION_DISTRIBUTE.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(saltAction).getBytes())) .post(Entity.json(saltAction)); GenericResponses responseEntity = JaxRSUtil.response(response, GenericResponses.class); LOGGER.info("SaltAction response: {}", responseEntity); return responseEntity; } public <T> T run(Target<String> target, String fun, SaltClientType clientType, Class<T> clazz, String... 
arg) { Form form = new Form(); form = addAuth(form) .param("fun", fun) .param("client", clientType.getType()) .param("tgt", target.getTarget()) .param("expr_form", target.getType()); if (arg != null) { if (clientType.equals(SaltClientType.LOCAL) || clientType.equals(SaltClientType.LOCAL_ASYNC)) { for (String a : arg) { form.param("arg", a); } } else { for (int i = 0; i < arg.length - 1; i = i + 2) { form.param(arg[i], arg[i + 1]); } } } Response response = saltTarget.path(SaltEndpoint.SALT_RUN.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(form.asMap()).getBytes())) .post(Entity.form(form)); T responseEntity = JaxRSUtil.response(response, clazz); LOGGER.info("Salt run response: {}", responseEntity); return responseEntity; } public <T> T wheel(String fun, Collection<String> match, Class<T> clazz) { Form form = new Form(); form = addAuth(form) .param("fun", fun) .param("client", "wheel"); if (match != null && !match.isEmpty()) { form.param("match", match.stream().collect(Collectors.joining(","))); } Response response = saltTarget.path(SaltEndpoint.SALT_RUN.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(form.asMap()).getBytes())) .post(Entity.form(form)); T responseEntity = JaxRSUtil.response(response, clazz); LOGGER.info("SaltAction response: {}", responseEntity); return responseEntity; } public GenericResponses upload(Set<String> targets, String path, String fileName, byte[] content) throws IOException { Response distributeResponse = upload(SaltEndpoint.BOOT_FILE_DISTRIBUTE.getContextPath(), targets, path, fileName, content); if (distributeResponse.getStatus() == HttpStatus.SC_NOT_FOUND) { // simple file upload for CB <= 1.14 distributeResponse.close(); Response singleResponse = upload(SaltEndpoint.BOOT_FILE_UPLOAD.getContextPath(), targets, path, fileName, content); GenericResponses genericResponses = new GenericResponses(); GenericResponse genericResponse = new GenericResponse(); genericResponse.setAddress(targets.iterator().next()); genericResponse.setStatusCode(singleResponse.getStatus()); genericResponses.setResponses(Collections.singletonList(genericResponse)); singleResponse.close(); return genericResponses; } return JaxRSUtil.response(distributeResponse, GenericResponses.class); } private Response upload(String endpoint, Set<String> targets, String path, String fileName, byte[] content) throws IOException { try (ByteArrayInputStream inputStream = new ByteArrayInputStream(content)) { StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", inputStream, fileName); MultiPart multiPart = new FormDataMultiPart().field("path", path).field("targets", String.join(",", targets)).bodyPart(streamDataBodyPart); MediaType contentType = MediaType.MULTIPART_FORM_DATA_TYPE; contentType = Boundary.addBoundary(contentType); String signature = PkiUtil.generateSignature(signatureKey, content); return saltTarget.path(endpoint).request().header(SIGN_HEADER, signature).post(Entity.entity(multiPart, contentType)); } } public Map<String, String> members(List<String> privateIps) throws CloudbreakOrchestratorFailedException { Map<String, List<String>> clients = singletonMap("clients", privateIps); Response response = saltTarget.path(BOOT_HOSTNAME_ENDPOINT.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(clients).getBytes())) .post(Entity.json(clients)); GenericResponses responses = JaxRSUtil.response(response, GenericResponses.class); 
List<GenericResponse> failedResponses = responses.getResponses().stream() .filter(genericResponse -> !ACCEPTED_STATUSES.contains(genericResponse.getStatusCode())).collect(Collectors.toList()); if (!failedResponses.isEmpty()) { failedResponseErrorLog(failedResponses); String failedNodeAddresses = failedResponses.stream().map(GenericResponse::getAddress).collect(Collectors.joining(",")); throw new CloudbreakOrchestratorFailedException("Hostname resolution failed for nodes: " + failedNodeAddresses); } return responses.getResponses().stream().collect(Collectors.toMap(GenericResponse::getAddress, GenericResponse::getStatus)); } private void failedResponseErrorLog(List<GenericResponse> failedResponses) { StringBuilder failedResponsesErrorMessage = new StringBuilder(); failedResponsesErrorMessage.append("Failed response from salt bootstrap, endpoint: ").append(BOOT_HOSTNAME_ENDPOINT); for (GenericResponse failedResponse : failedResponses) { failedResponsesErrorMessage.append("\n").append("Status code: ").append(failedResponse.getStatusCode()); failedResponsesErrorMessage.append(" Error message: ").append(failedResponse.getStatus()); } LOGGER.error(failedResponsesErrorMessage.toString()); } private Form addAuth(Form form) { form.param("username", SALT_USER) .param("password", saltPassword) .param("eauth", "pam"); return form; } @Override public void close() throws IOException { if (restClient != null) { restClient.close(); } } public String getSaltPassword() { return saltPassword; } private String toJson(Object target) { try { return MAPPER.writeValueAsString(target); } catch (JsonProcessingException e) { throw new IllegalArgumentException(e); } } }
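pillar() and upload() above share one backward-compatibility idiom: POST to the newer distribute endpoint first and, when an older salt-bootstrap answers 404, close that response and retry the legacy single-node endpoint. A stripped-down sketch of just that idiom; "distribute" and "save" are placeholder paths, and the signature header the real class adds via PkiUtil.generateSignature is omitted.

import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import org.apache.http.HttpStatus;

// Sketch of the distribute-then-fallback idiom used by pillar() and upload().
class FallbackPostSketch {
  static Response postWithFallback(WebTarget target, Object payload) {
    Response first = target.path("distribute").request().post(Entity.json(payload));
    if (first.getStatus() == HttpStatus.SC_NOT_FOUND) {
      first.close(); // release the pooled connection before retrying the legacy endpoint
      return target.path("save").request().post(Entity.json(payload));
    }
    return first; // caller is responsible for reading or closing this response
  }
}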
orchestrator-salt/src/main/java/com/sequenceiq/cloudbreak/orchestrator/salt/client/SaltConnector.java
package com.sequenceiq.cloudbreak.orchestrator.salt.client; import static com.sequenceiq.cloudbreak.orchestrator.salt.client.SaltEndpoint.BOOT_HOSTNAME_ENDPOINT; import static java.util.Collections.singletonMap; import java.io.ByteArrayInputStream; import java.io.Closeable; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.ws.rs.client.Client; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Form; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.http.HttpStatus; import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; import org.glassfish.jersey.media.multipart.Boundary; import org.glassfish.jersey.media.multipart.FormDataMultiPart; import org.glassfish.jersey.media.multipart.MultiPart; import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.sequenceiq.cloudbreak.client.RestClientUtil; import com.sequenceiq.cloudbreak.client.PkiUtil; import com.sequenceiq.cloudbreak.orchestrator.exception.CloudbreakOrchestratorFailedException; import com.sequenceiq.cloudbreak.orchestrator.model.GatewayConfig; import com.sequenceiq.cloudbreak.orchestrator.model.GenericResponse; import com.sequenceiq.cloudbreak.orchestrator.model.GenericResponses; import com.sequenceiq.cloudbreak.orchestrator.salt.client.target.Target; import com.sequenceiq.cloudbreak.orchestrator.salt.domain.Pillar; import com.sequenceiq.cloudbreak.orchestrator.salt.domain.SaltAction; import com.sequenceiq.cloudbreak.util.JaxRSUtil; public class SaltConnector implements Closeable { private static final Logger LOGGER = LoggerFactory.getLogger(SaltConnector.class); private static final ObjectMapper MAPPER = new ObjectMapper(); private static final String SALT_USER = "saltuser"; private static final String SALT_PASSWORD = "saltpass"; private static final String SALT_BOOT_USER = "cbadmin"; private static final String SALT_BOOT_PASSWORD = "cbadmin"; private static final String SIGN_HEADER = "signature"; private static final List<Integer> ACCEPTED_STATUSES = Arrays.asList(HttpStatus.SC_OK, HttpStatus.SC_CREATED, HttpStatus.SC_ACCEPTED); private final Client restClient; private final WebTarget saltTarget; private final String saltPassword; private final String signatureKey; public SaltConnector(GatewayConfig gatewayConfig, boolean debug) { try { this.restClient = RestClientUtil.createClient( gatewayConfig.getServerCert(), gatewayConfig.getClientCert(), gatewayConfig.getClientKey(), debug, SaltConnector.class); String saltBootPasswd = Optional.ofNullable(gatewayConfig.getSaltBootPassword()).orElse(SALT_BOOT_PASSWORD); HttpAuthenticationFeature feature = HttpAuthenticationFeature.basic(SALT_BOOT_USER, saltBootPasswd); this.saltTarget = restClient.target(gatewayConfig.getGatewayUrl()).register(feature); this.saltPassword = Optional.ofNullable(gatewayConfig.getSaltPassword()).orElse(SALT_PASSWORD); this.signatureKey = gatewayConfig.getSignatureKey(); } catch (Exception e) { throw new RuntimeException("Failed to create rest client with 2-way-ssl config", e); } } public GenericResponse health() { Response response = 
saltTarget.path(SaltEndpoint.BOOT_HEALTH.getContextPath()).request().get(); GenericResponse responseEntity = JaxRSUtil.response(response, GenericResponse.class); LOGGER.info("Health response: {}", responseEntity); return responseEntity; } public GenericResponses pillar(Set<String> targets, Pillar pillar) { Response distributeResponse = saltTarget.path(SaltEndpoint.BOOT_PILLAR_DISTRIBUTE.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(pillar).getBytes())) .post(Entity.json(pillar)); if (distributeResponse.getStatus() == HttpStatus.SC_NOT_FOUND) { // simple pillar save for CB <= 1.14 Response singleResponse = saltTarget.path(SaltEndpoint.BOOT_PILLAR_SAVE.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(pillar).getBytes())) .post(Entity.json(pillar)); GenericResponses genericResponses = new GenericResponses(); GenericResponse genericResponse = new GenericResponse(); genericResponse.setAddress(targets.iterator().next()); genericResponse.setStatusCode(singleResponse.getStatus()); genericResponses.setResponses(Collections.singletonList(genericResponse)); return genericResponses; } return JaxRSUtil.response(distributeResponse, GenericResponses.class); } public GenericResponses action(SaltAction saltAction) { Response response = saltTarget.path(SaltEndpoint.BOOT_ACTION_DISTRIBUTE.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(saltAction).getBytes())) .post(Entity.json(saltAction)); GenericResponses responseEntity = JaxRSUtil.response(response, GenericResponses.class); LOGGER.info("SaltAction response: {}", responseEntity); return responseEntity; } public <T> T run(Target<String> target, String fun, SaltClientType clientType, Class<T> clazz, String... 
arg) { Form form = new Form(); form = addAuth(form) .param("fun", fun) .param("client", clientType.getType()) .param("tgt", target.getTarget()) .param("expr_form", target.getType()); if (arg != null) { if (clientType.equals(SaltClientType.LOCAL) || clientType.equals(SaltClientType.LOCAL_ASYNC)) { for (String a : arg) { form.param("arg", a); } } else { for (int i = 0; i < arg.length - 1; i = i + 2) { form.param(arg[i], arg[i + 1]); } } } Response response = saltTarget.path(SaltEndpoint.SALT_RUN.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(form.asMap()).getBytes())) .post(Entity.form(form)); T responseEntity = JaxRSUtil.response(response, clazz); LOGGER.info("Salt run response: {}", responseEntity); return responseEntity; } public <T> T wheel(String fun, Collection<String> match, Class<T> clazz) { Form form = new Form(); form = addAuth(form) .param("fun", fun) .param("client", "wheel"); if (match != null && !match.isEmpty()) { form.param("match", match.stream().collect(Collectors.joining(","))); } Response response = saltTarget.path(SaltEndpoint.SALT_RUN.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(form.asMap()).getBytes())) .post(Entity.form(form)); T responseEntity = JaxRSUtil.response(response, clazz); LOGGER.info("SaltAction response: {}", responseEntity); return responseEntity; } public GenericResponses upload(Set<String> targets, String path, String fileName, byte[] content) throws IOException { Response distributeResponse = upload(SaltEndpoint.BOOT_FILE_DISTRIBUTE.getContextPath(), targets, path, fileName, content); if (distributeResponse.getStatus() == HttpStatus.SC_NOT_FOUND) { // simple file upload for CB <= 1.14 Response singleResponse = upload(SaltEndpoint.BOOT_FILE_UPLOAD.getContextPath(), targets, path, fileName, content); GenericResponses genericResponses = new GenericResponses(); GenericResponse genericResponse = new GenericResponse(); genericResponse.setAddress(targets.iterator().next()); genericResponse.setStatusCode(singleResponse.getStatus()); genericResponses.setResponses(Collections.singletonList(genericResponse)); return genericResponses; } return JaxRSUtil.response(distributeResponse, GenericResponses.class); } private Response upload(String endpoint, Set<String> targets, String path, String fileName, byte[] content) throws IOException { try (ByteArrayInputStream inputStream = new ByteArrayInputStream(content)) { StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", inputStream, fileName); MultiPart multiPart = new FormDataMultiPart().field("path", path).field("targets", String.join(",", targets)).bodyPart(streamDataBodyPart); MediaType contentType = MediaType.MULTIPART_FORM_DATA_TYPE; contentType = Boundary.addBoundary(contentType); String signature = PkiUtil.generateSignature(signatureKey, content); return saltTarget.path(endpoint).request().header(SIGN_HEADER, signature).post(Entity.entity(multiPart, contentType)); } } public Map<String, String> members(List<String> privateIps) throws CloudbreakOrchestratorFailedException { Map<String, List<String>> clients = singletonMap("clients", privateIps); Response response = saltTarget.path(BOOT_HOSTNAME_ENDPOINT.getContextPath()).request() .header(SIGN_HEADER, PkiUtil.generateSignature(signatureKey, toJson(clients).getBytes())) .post(Entity.json(clients)); GenericResponses responses = JaxRSUtil.response(response, GenericResponses.class); List<GenericResponse> failedResponses = 
responses.getResponses().stream() .filter(genericResponse -> !ACCEPTED_STATUSES.contains(genericResponse.getStatusCode())).collect(Collectors.toList()); if (!failedResponses.isEmpty()) { failedResponseErrorLog(failedResponses); String failedNodeAddresses = failedResponses.stream().map(GenericResponse::getAddress).collect(Collectors.joining(",")); throw new CloudbreakOrchestratorFailedException("Hostname resolution failed for nodes: " + failedNodeAddresses); } return responses.getResponses().stream().collect(Collectors.toMap(GenericResponse::getAddress, GenericResponse::getStatus)); } private void failedResponseErrorLog(List<GenericResponse> failedResponses) { StringBuilder failedResponsesErrorMessage = new StringBuilder(); failedResponsesErrorMessage.append("Failed response from salt bootstrap, endpoint: ").append(BOOT_HOSTNAME_ENDPOINT); for (GenericResponse failedResponse : failedResponses) { failedResponsesErrorMessage.append("\n").append("Status code: ").append(failedResponse.getStatusCode()); failedResponsesErrorMessage.append(" Error message: ").append(failedResponse.getStatus()); } LOGGER.error(failedResponsesErrorMessage.toString()); } private Form addAuth(Form form) { form.param("username", SALT_USER) .param("password", saltPassword) .param("eauth", "pam"); return form; } @Override public void close() throws IOException { if (restClient != null) { restClient.close(); } } public String getSaltPassword() { return saltPassword; } private String toJson(Object target) { try { return MAPPER.writeValueAsString(target); } catch (JsonProcessingException e) { throw new IllegalArgumentException(e); } } }
CLOUD-78751 Close saltconnector.upload jersey responses properly
orchestrator-salt/src/main/java/com/sequenceiq/cloudbreak/orchestrator/salt/client/SaltConnector.java
CLOUD-78751 Close saltconnector.upload jersey responses properly
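The fix above closes the intermediate Jersey Response objects explicitly. Where JAX-RS 2.1+ is on the classpath, Response implements AutoCloseable, so the same leak can also be avoided with try-with-resources; a minimal sketch with a placeholder URL, not code from the Cloudbreak tree:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Response;

// Requires JAX-RS 2.1+, where Response implements AutoCloseable.
public class ResponseCloseSketch {
  public static int statusOf(String url) {
    Client client = ClientBuilder.newClient();
    try (Response response = client.target(url).request().get()) {
      // only the status is inspected; without a close the connection would stay checked out
      return response.getStatus();
    } finally {
      client.close();
    }
  }
}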
Java
apache-2.0
483599db59f2a595401294834d64e55135c7610b
0
TKnudsen/ComplexDataObject
package com.github.TKnudsen.ComplexDataObject.data.uncertainty.Double; import java.util.Collection; import com.github.TKnudsen.ComplexDataObject.data.uncertainty.IUncertaintyQuantitative; import com.github.TKnudsen.ComplexDataObject.model.tools.StatisticsSupport; /** * <p> * Title: NumericalUncertainty * </p> * * <p> * Description: data model for uncertainties of numerical values. * </p> * * <p> * Copyright: Copyright (c) 2015-2017 * </p> * * @author Juergen Bernard * @version 1.02 */ public class NumericalUncertainty implements IUncertaintyQuantitative<Double> { private Double min; private Double max; private Double representant; private double variation; public NumericalUncertainty(Collection<? extends Double> values) { initialize(values); } private void initialize(Collection<? extends Double> values) { StatisticsSupport statisticsSupport = new StatisticsSupport(values); this.min = statisticsSupport.getMin(); this.max = statisticsSupport.getMax(); this.representant = statisticsSupport.getMedian(); this.variation = statisticsSupport.getVariance(); } @Override public String toString() { return "NumericalUncertainty. min, max, representant, variation: " + min + ", " + max + ", " + representant + ", " + variation; } @Override public Double getMostCertainRepresentant() { return representant; } @Override public Double getMinimum() { return min; } @Override public Double getMaximum() { return max; } @Override public double getVariation() { return variation; } }
src/main/java/com/github/TKnudsen/ComplexDataObject/data/uncertainty/Double/NumericalUncertainty.java
package com.github.TKnudsen.ComplexDataObject.data.uncertainty.Double; import java.util.Collection; import com.github.TKnudsen.ComplexDataObject.data.uncertainty.IUncertaintyQuantitative; import com.github.TKnudsen.ComplexDataObject.model.tools.StatisticsSupport; /** * <p> * Title: NumericalUncertainty * </p> * * <p> * Description: data model for uncertainties of numerical values. * </p> * * <p> * Copyright: Copyright (c) 2015-2017 * </p> * * @author Juergen Bernard * @version 1.02 */ public class NumericalUncertainty implements IUncertaintyQuantitative<Double> { private Double min; private Double max; private Double representant; private double variation; public NumericalUncertainty(Collection<? extends Double> values) { initialize(values); } private void initialize(Collection<? extends Double> values) { StatisticsSupport statisticsSupport = new StatisticsSupport(values); this.min = statisticsSupport.getMin(); this.max = statisticsSupport.getMax(); this.representant = statisticsSupport.getMedian(); this.variation = statisticsSupport.getVariance(); } @Override public Double getMostCertainRepresentant() { return representant; } @Override public Double getMinimum() { return min; } @Override public Double getMaximum() { return max; } @Override public double getVariation() { return variation; } }
minor changes to NumericalUncertainty Double
src/main/java/com/github/TKnudsen/ComplexDataObject/data/uncertainty/Double/NumericalUncertainty.java
minor changes to NumericalUncertainty Double
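The diff above only adds a toString() override reporting min, max, the median representant and the variance. A tiny usage sketch, assuming NumericalUncertainty and its StatisticsSupport dependency are on the classpath:

import java.util.Arrays;
import com.github.TKnudsen.ComplexDataObject.data.uncertainty.Double.NumericalUncertainty;

public class NumericalUncertaintyDemo {
  public static void main(String[] args) {
    NumericalUncertainty u = new NumericalUncertainty(Arrays.asList(1.0, 2.0, 2.0, 3.0, 10.0));
    // prints min, max, the median representant and the variance on one line
    System.out.println(u);
  }
}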
Java
apache-2.0
a5bd49a04c33de5659117e3f3cf35b96ffa86a52
0
tokee/lucene,tokee/lucene,tokee/lucene,tokee/lucene
package org.apache.lucene.search.trie; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Date; import org.apache.lucene.search.Filter; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.TermDocs; import org.apache.lucene.index.TermEnum; import org.apache.lucene.index.Term; import org.apache.lucene.util.OpenBitSet; /** * Implementation of a Lucene {@link Filter} that implements trie-based range filtering. * This filter depends on a specific structure of terms in the index that can only be created * by {@link TrieUtils} methods. * For more information, how the algorithm works, see the package description {@link org.apache.lucene.search.trie}. */ public final class TrieRangeFilter extends Filter { /** * Universal constructor (expert use only): Uses already trie-converted min/max values. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final String min, final String max, final TrieUtils variant) { if (min==null && max==null) throw new IllegalArgumentException("The min and max values cannot be both null."); this.trieVariant=variant; this.minUnconverted=min; this.maxUnconverted=max; this.min=(min==null) ? trieVariant.TRIE_CODED_NUMERIC_MIN : min; this.max=(max==null) ? trieVariant.TRIE_CODED_NUMERIC_MAX : max; this.field=field.intern(); } /** * Universal constructor (expert use only): Uses already trie-converted min/max values. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie package returned by {@link TrieUtils#getDefaultTrieVariant()}. */ public TrieRangeFilter(final String field, final String min, final String max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } /** * Generates a trie query using the supplied field with range bounds in numeric form (double). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final Double min, final Double max, final TrieUtils variant) { this( field, (min==null) ? null : variant.doubleToTrieCoded(min.doubleValue()), (max==null) ? null : variant.doubleToTrieCoded(max.doubleValue()), variant ); this.minUnconverted=min; this.maxUnconverted=max; } /** * Generates a trie query using the supplied field with range bounds in numeric form (double). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}. 
*/ public TrieRangeFilter(final String field, final Double min, final Double max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } /** * Generates a trie query using the supplied field with range bounds in date/time form. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final Date min, final Date max, final TrieUtils variant) { this( field, (min==null) ? null : variant.dateToTrieCoded(min), (max==null) ? null : variant.dateToTrieCoded(max), variant ); this.minUnconverted=min; this.maxUnconverted=max; } /** * Generates a trie query using the supplied field with range bounds in date/time form. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}. */ public TrieRangeFilter(final String field, final Date min, final Date max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } /** * Generates a trie query using the supplied field with range bounds in integer form (long). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final Long min, final Long max, final TrieUtils variant) { this( field, (min==null) ? null : variant.longToTrieCoded(min.longValue()), (max==null) ? null : variant.longToTrieCoded(max.longValue()), variant ); this.minUnconverted=min; this.maxUnconverted=max; } /** * Generates a trie query using the supplied field with range bounds in integer form (long). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}. */ public TrieRangeFilter(final String field, final Long min, final Long max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } //@Override public String toString() { return toString(null); } public String toString(final String field) { final StringBuffer sb=new StringBuffer(); if (!this.field.equals(field)) sb.append(this.field).append(':'); return sb.append('[').append(minUnconverted).append(" TO ").append(maxUnconverted).append(']').toString(); } //@Override public final boolean equals(final Object o) { if (o instanceof TrieRangeFilter) { TrieRangeFilter q=(TrieRangeFilter)o; // trieVariants are singleton per type, so no equals needed return (field==q.field && min.equals(q.min) && max.equals(q.max) && trieVariant==q.trieVariant); } else return false; } //@Override public final int hashCode() { // the hash code uses from the variant only the number of bits, as this is unique for the variant return field.hashCode()+(min.hashCode()^0x14fa55fb)+(max.hashCode()^0x733fa5fe)+(trieVariant.TRIE_BITS^0x64365465); } /** prints the String in hexadecimal \\u notation (for debugging of <code>setBits()</code>) */ private String stringToHexDigits(final String s) { StringBuffer sb=new StringBuffer(s.length()*3); for (int i=0,c=s.length(); i<c; i++) { char ch=s.charAt(i); sb.append("\\u").append(Integer.toHexString((int)ch)); } return sb.toString(); } /** Marks documents in a specific range. 
Code borrowed from original RangeFilter and simplified (and returns number of terms) */ private int setBits(final IndexReader reader, final TermDocs termDocs, final OpenBitSet bits, String lowerTerm, String upperTerm) throws IOException { //System.out.println(stringToHexDigits(lowerTerm)+" TO "+stringToHexDigits(upperTerm)); int count=0,len=lowerTerm.length(); final String field; if (len<trieVariant.TRIE_CODED_LENGTH) { // lower precision value is in helper field field=(this.field + trieVariant.LOWER_PRECISION_FIELD_NAME_SUFFIX).intern(); // add padding before lower precision values to group them lowerTerm=new StringBuffer(len+1).append((char)(trieVariant.TRIE_CODED_PADDING_START+len)).append(lowerTerm).toString(); upperTerm=new StringBuffer(len+1).append((char)(trieVariant.TRIE_CODED_PADDING_START+len)).append(upperTerm).toString(); // length is longer by 1 char because of padding len++; } else { // full precision value is in original field field=this.field; } final TermEnum enumerator = reader.terms(new Term(field, lowerTerm)); try { do { final Term term = enumerator.term(); if (term!=null && term.field()==field) { // break out when upperTerm reached or length of term is different final String t=term.text(); if (len!=t.length() || t.compareTo(upperTerm)>0) break; // we have a good term, find the docs count++; termDocs.seek(enumerator); while (termDocs.next()) bits.set(termDocs.doc()); } else break; } while (enumerator.next()); } finally { enumerator.close(); } return count; } /** Splits range recursively (and returns number of terms) */ private int splitRange( final IndexReader reader, final TermDocs termDocs, final OpenBitSet bits, final String min, final boolean lowerBoundOpen, final String max, final boolean upperBoundOpen ) throws IOException { int count=0; final int length=min.length(); final String minShort=lowerBoundOpen ? min.substring(0,length-1) : trieVariant.incrementTrieCoded(min.substring(0,length-1)); final String maxShort=upperBoundOpen ? max.substring(0,length-1) : trieVariant.decrementTrieCoded(max.substring(0,length-1)); if (length==1 || minShort.compareTo(maxShort)>=0) { // we are in the lowest precision or the current precision is not existent count+=setBits(reader, termDocs, bits, min, max); } else { // Avoid too much seeking: first go deeper into lower precision // (in IndexReader's TermEnum these terms are earlier). // Do this only, if the current length is not trieVariant.TRIE_CODED_LENGTH (not full precision), // because terms from the highest prec come before all lower prec terms // (because the field name is ordered before the suffixed one). if (length!=trieVariant.TRIE_CODED_LENGTH) count+=splitRange( reader,termDocs,bits, minShort,lowerBoundOpen, maxShort,upperBoundOpen ); // Avoid too much seeking: set bits for lower part of current (higher) precision. // These terms come later in IndexReader's TermEnum. if (!lowerBoundOpen) { count+=setBits(reader, termDocs, bits, min, trieVariant.decrementTrieCoded(minShort+trieVariant.TRIE_CODED_SYMBOL_MIN)); } // Avoid too much seeking: set bits for upper part of current precision. // These terms come later in IndexReader's TermEnum. if (!upperBoundOpen) { count+=setBits(reader, termDocs, bits, trieVariant.incrementTrieCoded(maxShort+trieVariant.TRIE_CODED_SYMBOL_MAX), max); } // If the first step (see above) was not done (because length==trieVariant.TRIE_CODED_LENGTH) we do it now. 
if (length==trieVariant.TRIE_CODED_LENGTH) count+=splitRange( reader,termDocs,bits, minShort,lowerBoundOpen, maxShort,upperBoundOpen ); } return count; } /** * Returns a DocIdSet that provides the documents which should be permitted or prohibited in search results. */ //@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException { final OpenBitSet bits = new OpenBitSet(reader.maxDoc()); final TermDocs termDocs = reader.termDocs(); try { lastNumberOfTerms=splitRange( reader,termDocs,bits, min,trieVariant.TRIE_CODED_NUMERIC_MIN.equals(min), max,trieVariant.TRIE_CODED_NUMERIC_MAX.equals(max) ); //System.out.println("Found "+lastNumberOfTerms+" distinct terms in filtered range for field '"+field+"'."); } finally { termDocs.close(); } return bits; } /** * EXPERT: Return the number of terms visited during the last execution of {@link #getDocIdSet}. * This may be used for performance comparisons of different trie variants and their effectiveness. * This method is not thread safe; be sure to only call it when no query is running! * @throws IllegalStateException if {@link #getDocIdSet} has not yet been executed. */ public int getLastNumberOfTerms() { if (lastNumberOfTerms < 0) throw new IllegalStateException(); return lastNumberOfTerms; } // members private final String field,min,max; private final TrieUtils trieVariant; private Object minUnconverted,maxUnconverted; private int lastNumberOfTerms=-1; }
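A hypothetical usage sketch for the filter defined above, written against the Lucene 2.4-era contrib API this package appears to target (an assumption; it is not part of the commit). The index path, field name, and bounds are illustrative, and the "price" field must have been indexed as trie-coded terms by the same TrieUtils variant (here the default), as the class javadoc requires.

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.trie.TrieRangeFilter;

public class TrieRangeFilterExample {
  public static void main(String[] args) throws Exception {
    IndexReader reader = IndexReader.open("/path/to/index"); // hypothetical index location
    IndexSearcher searcher = new IndexSearcher(reader);
    // price >= 10 with the upper bound left open (null), using the default trie variant
    TrieRangeFilter filter = new TrieRangeFilter("price", Long.valueOf(10L), null);
    TopDocs top = searcher.search(new MatchAllDocsQuery(), filter, 10);
    System.out.println(top.totalHits + " hits; " + filter.getLastNumberOfTerms() + " terms visited");
    searcher.close();
    reader.close();
  }
}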
contrib/queries/src/java/org/apache/lucene/search/trie/TrieRangeFilter.java
package org.apache.lucene.search.trie; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Date; import org.apache.lucene.search.Filter; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.TermDocs; import org.apache.lucene.index.TermEnum; import org.apache.lucene.index.Term; import org.apache.lucene.util.OpenBitSet; /** * Implementation of a Lucene {@link Filter} that implements trie-based range filtering. * This filter depends on a specific structure of terms in the index that can only be created * by {@link TrieUtils} methods. * For more information on how the algorithm works, see the package description {@link org.apache.lucene.search.trie}. */ public final class TrieRangeFilter extends Filter { /** * Universal constructor (expert use only): Uses already trie-converted min/max values. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final String min, final String max, final TrieUtils variant) { if (min==null && max==null) throw new IllegalArgumentException("The min and max values cannot both be null."); this.trieVariant=variant; this.minUnconverted=min; this.maxUnconverted=max; this.min=(min==null) ? trieVariant.TRIE_CODED_NUMERIC_MIN : min; this.max=(max==null) ? trieVariant.TRIE_CODED_NUMERIC_MAX : max; this.field=field.intern(); } /** * Universal constructor (expert use only): Uses already trie-converted min/max values. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}. */ public TrieRangeFilter(final String field, final String min, final String max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } /** * Generates a trie query using the supplied field with range bounds in numeric form (double). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final Double min, final Double max, final TrieUtils variant) { this( field, (min==null) ? null : variant.doubleToTrieCoded(min.doubleValue()), (max==null) ? null : variant.doubleToTrieCoded(max.doubleValue()), variant ); this.minUnconverted=min; this.maxUnconverted=max; } /** * Generates a trie query using the supplied field with range bounds in numeric form (double). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}.
*/ public TrieRangeFilter(final String field, final Double min, final Double max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } /** * Generates a trie query using the supplied field with range bounds in date/time form. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final Date min, final Date max, final TrieUtils variant) { this( field, (min==null) ? null : variant.dateToTrieCoded(min), (max==null) ? null : variant.dateToTrieCoded(max), variant ); this.minUnconverted=min; this.maxUnconverted=max; } /** * Generates a trie query using the supplied field with range bounds in date/time form. * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}. */ public TrieRangeFilter(final String field, final Date min, final Date max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } /** * Generates a trie query using the supplied field with range bounds in integer form (long). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. */ public TrieRangeFilter(final String field, final Long min, final Long max, final TrieUtils variant) { this( field, (min==null) ? null : variant.longToTrieCoded(min.longValue()), (max==null) ? null : variant.longToTrieCoded(max.longValue()), variant ); this.minUnconverted=min; this.maxUnconverted=max; } /** * Generates a trie query using the supplied field with range bounds in integer form (long). * You can set <code>min</code> or <code>max</code> (but not both) to <code>null</code> to leave one bound open. * <p>This constructor uses the trie variant returned by {@link TrieUtils#getDefaultTrieVariant()}. */ public TrieRangeFilter(final String field, final Long min, final Long max) { this(field,min,max,TrieUtils.getDefaultTrieVariant()); } //@Override public String toString() { return toString(null); } public String toString(final String field) { final StringBuffer sb=new StringBuffer(); if (!this.field.equals(field)) sb.append(this.field).append(':'); return sb.append('[').append(minUnconverted).append(" TO ").append(maxUnconverted).append(']').toString(); } //@Override public final boolean equals(final Object o) { if (o instanceof TrieRangeFilter) { TrieRangeFilter q=(TrieRangeFilter)o; // trieVariants are singleton per type, so no equals needed return (field==q.field && min.equals(q.min) && max.equals(q.max) && trieVariant==q.trieVariant); } else return false; } //@Override public final int hashCode() { // the hash code uses from the variant only the number of bits, as this is unique for the variant return field.hashCode()+(min.hashCode()^0x14fa55fb)+(max.hashCode()^0x733fa5fe)+(trieVariant.TRIE_BITS^0x64365465); } /** prints the String in hexadecimal \\u notation (for debugging of <code>setBits()</code>) */ private String stringToHexDigits(final String s) { StringBuffer sb=new StringBuffer(s.length()*3); for (int i=0,c=s.length(); i<c; i++) { char ch=s.charAt(i); sb.append("\\u").append(Integer.toHexString((int)ch)); } return sb.toString(); } /** Marks documents in a specific range. 
Code borrowed from original RangeFilter and simplified (and returns number of terms) */ private int setBits(final IndexReader reader, final TermDocs termDocs, final OpenBitSet bits, String lowerTerm, String upperTerm) throws IOException { //System.out.println(stringToHexDigits(lowerTerm)+" TO "+stringToHexDigits(upperTerm)); int count=0,len=lowerTerm.length(); final String field; if (len<trieVariant.TRIE_CODED_LENGTH) { // lower precision value is in helper field field=(this.field + trieVariant.LOWER_PRECISION_FIELD_NAME_SUFFIX).intern(); // add padding before lower precision values to group them lowerTerm=new StringBuffer(len+1).append((char)(trieVariant.TRIE_CODED_PADDING_START+len)).append(lowerTerm).toString(); upperTerm=new StringBuffer(len+1).append((char)(trieVariant.TRIE_CODED_PADDING_START+len)).append(upperTerm).toString(); // length is longer by 1 char because of padding len++; } else { // full precision value is in original field field=this.field; } final TermEnum enumerator = reader.terms(new Term(field, lowerTerm)); try { do { final Term term = enumerator.term(); if (term!=null && term.field()==field) { // break out when upperTerm reached or length of term is different final String t=term.text(); if (len!=t.length() || t.compareTo(upperTerm)>0) break; // we have a good term, find the docs count++; termDocs.seek(enumerator); while (termDocs.next()) bits.set(termDocs.doc()); } else break; } while (enumerator.next()); } finally { enumerator.close(); } return count; } /** Splits range recursively (and returns number of terms) */ private int splitRange( final IndexReader reader, final TermDocs termDocs, final OpenBitSet bits, final String min, final boolean lowerBoundOpen, final String max, final boolean upperBoundOpen ) throws IOException { int count=0; final int length=min.length(); final String minShort=lowerBoundOpen ? min.substring(0,length-1) : trieVariant.incrementTrieCoded(min.substring(0,length-1)); final String maxShort=upperBoundOpen ? max.substring(0,length-1) : trieVariant.decrementTrieCoded(max.substring(0,length-1)); if (length==1 || minShort.compareTo(maxShort)>=0) { // we are in the lowest precision or the current precision is not existent count+=setBits(reader, termDocs, bits, min, max); } else { // Avoid too much seeking: first go deeper into lower precision // (in IndexReader's TermEnum these terms are earlier). // Do this only, if the current length is not trieVariant.TRIE_CODED_LENGTH (not full precision), // because terms from the highest prec come before all lower prec terms // (because the field name is ordered before the suffixed one). if (length!=trieVariant.TRIE_CODED_LENGTH) count+=splitRange( reader,termDocs,bits, minShort,lowerBoundOpen, maxShort,upperBoundOpen ); // Avoid too much seeking: set bits for lower part of current (higher) precision. // These terms come later in IndexReader's TermEnum. if (!lowerBoundOpen) { count+=setBits(reader, termDocs, bits, min, trieVariant.decrementTrieCoded(minShort+trieVariant.TRIE_CODED_SYMBOL_MIN)); } // Avoid too much seeking: set bits for upper part of current precision. // These terms come later in IndexReader's TermEnum. if (!upperBoundOpen) { count+=setBits(reader, termDocs, bits, trieVariant.incrementTrieCoded(maxShort+trieVariant.TRIE_CODED_SYMBOL_MAX), max); } // If the first step (see above) was not done (because length==trieVariant.TRIE_CODED_LENGTH) we do it now. 
if (length==trieVariant.TRIE_CODED_LENGTH) count+=splitRange( reader,termDocs,bits, minShort,lowerBoundOpen, maxShort,upperBoundOpen ); } return count; } /** * Returns a DocIdSet that provides the documents which should be permitted or prohibited in search results. */ //@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException { final OpenBitSet bits = new OpenBitSet(reader.maxDoc()); final TermDocs termDocs=reader.termDocs(); try { final int count=splitRange( reader,termDocs,bits, min,trieVariant.TRIE_CODED_NUMERIC_MIN.equals(min), max,trieVariant.TRIE_CODED_NUMERIC_MAX.equals(max) ); lastNumberOfTerms=new Integer(count); //System.out.println("Found "+count+" distinct terms in filtered range for field '"+field+"'."); } finally { termDocs.close(); } return bits; } /** * EXPERT: Return the number of terms visited during the last execution of {@link #getDocIdSet}. * This may be used for performance comparisons of different trie variants and their effectiveness. * This method is not thread safe; be sure to only call it when no query is running! * @throws IllegalStateException if {@link #getDocIdSet} has not yet been executed. */ //@Override public int getLastNumberOfTerms() { if (lastNumberOfTerms==null) throw new IllegalStateException(); return lastNumberOfTerms.intValue(); } // members private final String field,min,max; private final TrieUtils trieVariant; private Object minUnconverted,maxUnconverted; private Integer lastNumberOfTerms=null; }
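To make the recursion in splitRange above concrete, here is a small standalone illustration of the same precision-splitting idea using plain base-10 integers instead of TrieUtils' trie-coded terms (an assumed simplification; the real filter walks encoded terms in the main field and the LOWER_PRECISION helper field). Values fully covered by a shorter prefix are delegated one precision lower; only the edges are enumerated at full precision.

import java.util.ArrayList;
import java.util.List;

public class SplitRangeSketch {
  // Emits "value@level" entries that together cover [min,max] exactly (non-negative values assumed).
  static void split(long min, long max, int level, List<String> out) {
    long minShort = (min % 10 == 0) ? min / 10 : min / 10 + 1; // first prefix fully inside the range
    long maxShort = (max % 10 == 9) ? max / 10 : max / 10 - 1; // last prefix fully inside the range
    if (minShort > maxShort) { // no complete lower-precision prefix exists: enumerate here
      for (long v = min; v <= max; v++) out.add(v + "@" + level);
      return;
    }
    for (long v = min; v < minShort * 10; v++) out.add(v + "@" + level);       // lower edge, this precision
    for (long v = maxShort * 10 + 10; v <= max; v++) out.add(v + "@" + level); // upper edge, this precision
    split(minShort, maxShort, level + 1, out); // interior delegated one precision lower
  }

  public static void main(String[] args) {
    List<String> terms = new ArrayList<String>();
    split(13, 87, 0, terms);
    // 7 lower-edge values (13-19), 8 upper-edge values (80-87) and 6 prefixes (2-7,
    // i.e. 20-29 .. 70-79) cover the 75 values of [13,87] with only 21 terms.
    System.out.println(terms);
  }
}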
Small optimization because an Object is not needed here git-svn-id: 4c5078813df38efa56971a28e09a55254294f104@733449 13f79535-47bb-0310-9956-ffa450edef68
contrib/queries/src/java/org/apache/lucene/search/trie/TrieRangeFilter.java
Small optimization because an Object is not needed here
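For context, the "small optimization" named in the subject is the change visible between the two file versions above: the boxed sentinel becomes a primitive, removing one Integer allocation per getDocIdSet call while keeping the same IllegalStateException contract. In essence:

// before (old_contents): boxed Integer, null as the "not yet run" sentinel
private Integer lastNumberOfTerms = null;   // lastNumberOfTerms = new Integer(count) allocates per run

// after (new_contents): primitive int, -1 as the sentinel
private int lastNumberOfTerms = -1;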
Java
apache-2.0
37aa87bf64b2a585076ebdfc6890f12229592923
0
eldevanjr/nfe,danieldhp/nfe,wmixvideo/nfe,caiocteodoro/nfe,jefperito/nfe,fincatto/nfe
package com.fincatto.nfe310.parsers; import java.io.File; import org.simpleframework.xml.core.Persister; import com.fincatto.nfe310.classes.lote.envio.NFLoteEnvio; import com.fincatto.nfe310.classes.nota.NFNota; import com.fincatto.nfe310.classes.nota.NFNotaInfoItem; import com.fincatto.nfe310.classes.nota.NFNotaProcessada; import com.fincatto.nfe310.persister.NFPersister; public class NotaParser { private final Persister persister; public NotaParser() { this.persister = new NFPersister(); } public NFNota notaParaObjeto(final File xml) { try { return this.persister.read(NFNota.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNota notaParaObjeto(final String xml) { try { return this.persister.read(NFNota.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFLoteEnvio loteParaObjeto(final File xml) { try { return this.persister.read(NFLoteEnvio.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFLoteEnvio loteParaObjeto(final String xml) { try { return this.persister.read(NFLoteEnvio.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNotaProcessada notaProcessadaParaObjeto(final String xml) { try { return this.persister.read(NFNotaProcessada.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNotaProcessada notaProcessadaParaObjeto(final File xml) { try { return this.persister.read(NFNotaProcessada.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNotaInfoItem notaInfoItemParaObjeto(final String xml) { try { return this.persister.read(NFNotaInfoItem.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } }
src/main/java/com/fincatto/nfe310/parsers/NotaParser.java
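A hypothetical usage sketch of the notaInfoItemParaObjeto method this commit adds. The XML string is a placeholder, not a valid NF-e item fragment; with real input the method returns the bound NFNotaInfoItem, and any underlying parse failure surfaces as the IllegalArgumentException the parser wraps it in.

import com.fincatto.nfe310.classes.nota.NFNotaInfoItem;
import com.fincatto.nfe310.parsers.NotaParser;

public class NotaParserExemplo {
    public static void main(String[] args) {
        NotaParser parser = new NotaParser();
        String xml = "<det>...</det>"; // placeholder; supply a real NF-e item element
        try {
            NFNotaInfoItem item = parser.notaInfoItemParaObjeto(xml);
            System.out.println("Item parseado: " + item);
        } catch (IllegalArgumentException e) {
            // NotaParser wraps every underlying parse failure in IllegalArgumentException
            System.err.println(e.getMessage());
        }
    }
}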
package com.fincatto.nfe310.parsers; import java.io.File; import org.simpleframework.xml.core.Persister; import com.fincatto.nfe310.classes.lote.envio.NFLoteEnvio; import com.fincatto.nfe310.classes.nota.NFNota; import com.fincatto.nfe310.classes.nota.NFNotaProcessada; import com.fincatto.nfe310.persister.NFPersister; public class NotaParser { private final Persister persister; public NotaParser() { this.persister = new NFPersister(); } public NFNota notaParaObjeto(final File xml) { try { return this.persister.read(NFNota.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNota notaParaObjeto(final String xml) { try { return this.persister.read(NFNota.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFLoteEnvio loteParaObjeto(final File xml) { try { return this.persister.read(NFLoteEnvio.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFLoteEnvio loteParaObjeto(final String xml) { try { return this.persister.read(NFLoteEnvio.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNotaProcessada notaProcessadaParaObjeto(final String xml) { try { return this.persister.read(NFNotaProcessada.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } public NFNotaProcessada notaProcessadaParaObjeto(final File xml) { try { return this.persister.read(NFNotaProcessada.class, xml); } catch (final Exception e) { throw new IllegalArgumentException(String.format("Nao foi possivel parsear o xml: %s", e.getMessage())); } } }
NotaParser: add method to parse NFNotaInfoItem from XML
src/main/java/com/fincatto/nfe310/parsers/NotaParser.java
NotaParser: add method to parse NFNotaInfoItem from XML
Java
apache-2.0
88ccadf49171aa74e2b3cb87b1ce3a8484398519
0
RaviKumar7443/JPETSTORE,RaviKumar7443/JPETSTORE,RaviKumar7443/JPETSTORE
package org.mybatis.jpetstore.domain; import java.io.Serializable; import java.math.BigDecimal; public class Calculate implements Serializable { public void hello() { System.out.println("JPET Store Application"); System.out.println("Class name: Calculate.java"); System.out.println("Hello World"); System.out.println("Making a new Entry at Thu May 18 11:00:00 UTC 2017"); System.out.println("Thu May 18 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue May 16 11:00:00 UTC 2017"); System.out.println("Tue May 16 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun May 14 11:00:00 UTC 2017"); System.out.println("Sun May 14 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri May 12 11:00:00 UTC 2017"); System.out.println("Fri May 12 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed May 10 11:00:00 UTC 2017"); System.out.println("Wed May 10 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon May 8 11:00:00 UTC 2017"); System.out.println("Mon May 8 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat May 6 11:00:00 UTC 2017"); System.out.println("Sat May 6 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu May 4 11:00:00 UTC 2017"); System.out.println("Thu May 4 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue May 2 11:00:00 UTC 2017"); System.out.println("Tue May 2 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun Apr 30 11:00:00 UTC 2017"); System.out.println("Sun Apr 30 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri Apr 28 11:00:00 UTC 2017"); System.out.println("Fri Apr 28 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed Apr 26 11:00:00 UTC 2017"); System.out.println("Wed Apr 26 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Apr 24 11:00:00 UTC 2017"); System.out.println("Mon Apr 24 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat Apr 22 11:00:00 UTC 2017"); System.out.println("Sat Apr 22 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Apr 20 11:00:00 UTC 2017"); System.out.println("Thu Apr 20 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue Apr 18 11:00:00 UTC 2017"); System.out.println("Tue Apr 18 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun Apr 16 11:00:00 UTC 2017"); System.out.println("Sun Apr 16 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri Apr 14 11:00:00 UTC 2017"); System.out.println("Fri Apr 14 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed Apr 12 11:00:00 UTC 2017"); System.out.println("Wed Apr 12 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Apr 10 11:00:00 UTC 2017"); System.out.println("Mon Apr 10 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat Apr 8 11:00:00 UTC 2017"); System.out.println("Sat Apr 8 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Apr 6 11:00:00 UTC 2017"); System.out.println("Thu Apr 6 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue Apr 4 11:00:00 UTC 2017"); System.out.println("Tue Apr 4 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun Apr 2 11:00:00 UTC 2017"); System.out.println("Sun Apr 2 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Mar 30 11:00:00 UTC 2017"); System.out.println("Thu Mar 30 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue Mar 28 11:00:00 UTC 2017"); System.out.println("Tue Mar 28 11:00:00 UTC 
2017"); System.out.println("Making a new Entry at Sun Mar 26 11:00:00 UTC 2017"); System.out.println("Sun Mar 26 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri Mar 24 11:00:00 UTC 2017"); System.out.println("Fri Mar 24 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed Mar 22 11:00:00 UTC 2017"); System.out.println("Wed Mar 22 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Mar 20 11:00:00 UTC 2017"); System.out.println("Mon Mar 20 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat Mar 18 11:00:00 UTC 2017"); System.out.println("Sat Mar 18 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Mar 16 11:00:00 UTC 2017"); System.out.println("Thu Mar 16 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue Mar 14 11:00:00 UTC 2017"); System.out.println("Tue Mar 14 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun Mar 12 11:00:00 UTC 2017"); System.out.println("Sun Mar 12 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri Mar 10 11:00:00 UTC 2017"); System.out.println("Fri Mar 10 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed Mar 8 11:00:00 UTC 2017"); System.out.println("Wed Mar 8 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Mar 6 11:00:00 UTC 2017"); System.out.println("Mon Mar 6 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat Mar 4 11:00:00 UTC 2017"); System.out.println("Sat Mar 4 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Mar 2 11:00:00 UTC 2017"); System.out.println("Thu Mar 2 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue Feb 28 11:00:00 UTC 2017"); System.out.println("Tue Feb 28 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun Feb 26 11:00:00 UTC 2017"); System.out.println("Sun Feb 26 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri Feb 24 11:00:00 UTC 2017"); System.out.println("Fri Feb 24 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed Feb 22 11:00:00 UTC 2017"); System.out.println("Wed Feb 22 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Feb 20 11:00:00 UTC 2017"); System.out.println("Mon Feb 20 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat Feb 18 11:00:00 UTC 2017"); System.out.println("Sat Feb 18 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Feb 16 11:00:00 UTC 2017"); System.out.println("Thu Feb 16 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Tue Feb 14 11:00:00 UTC 2017"); System.out.println("Tue Feb 14 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sun Feb 12 11:00:00 UTC 2017"); System.out.println("Sun Feb 12 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Fri Feb 10 11:00:00 UTC 2017"); System.out.println("Fri Feb 10 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Wed Feb 8 11:00:00 UTC 2017"); System.out.println("Wed Feb 8 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Feb 6 11:00:00 UTC 2017"); System.out.println("Mon Feb 6 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Sat Feb 4 11:00:00 UTC 2017"); System.out.println("Sat Feb 4 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Thu Feb 2 11:00:00 UTC 2017"); System.out.println("Thu Feb 2 11:00:00 UTC 2017"); System.out.println("Making a new Entry at Mon Jan 30 11:00:00 UTC 2017"); System.out.println("Mon Jan 30 11:00:00 UTC 2017"); System.out.println("Making a new 
Entry at Sat Jan 28 11:00:15 UTC 2017"); System.out.println("Sat Jan 28 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Thu Jan 26 11:00:15 UTC 2017"); System.out.println("Thu Jan 26 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Tue Jan 24 11:00:15 UTC 2017"); System.out.println("Tue Jan 24 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Sun Jan 22 11:00:15 UTC 2017"); System.out.println("Sun Jan 22 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Fri Jan 20 11:00:15 UTC 2017"); System.out.println("Fri Jan 20 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Wed Jan 18 11:00:15 UTC 2017"); System.out.println("Wed Jan 18 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Mon Jan 16 11:00:15 UTC 2017"); System.out.println("Mon Jan 16 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Sat Jan 14 11:00:15 UTC 2017"); System.out.println("Sat Jan 14 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Thu Jan 12 11:00:15 UTC 2017"); System.out.println("Thu Jan 12 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Tue Jan 10 11:00:15 UTC 2017"); System.out.println("Tue Jan 10 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Sun Jan 8 11:00:15 UTC 2017"); System.out.println("Sun Jan 8 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Fri Jan 6 11:00:15 UTC 2017"); System.out.println("Fri Jan 6 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Wed Jan 4 11:00:15 UTC 2017"); System.out.println("Wed Jan 4 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Mon Jan 2 11:00:15 UTC 2017"); System.out.println("Mon Jan 2 11:00:15 UTC 2017"); System.out.println("Making a new Entry at Fri Dec 30 11:00:16 UTC 2016"); System.out.println("Fri Dec 30 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Wed Dec 28 11:00:16 UTC 2016"); System.out.println("Wed Dec 28 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Mon Dec 26 11:00:16 UTC 2016"); System.out.println("Mon Dec 26 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Sat Dec 24 11:00:16 UTC 2016"); System.out.println("Sat Dec 24 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Thu Dec 22 11:00:16 UTC 2016"); System.out.println("Thu Dec 22 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Tue Dec 20 11:00:16 UTC 2016"); System.out.println("Tue Dec 20 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Sun Dec 18 11:00:16 UTC 2016"); System.out.println("Sun Dec 18 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Fri Dec 16 11:00:16 UTC 2016"); System.out.println("Fri Dec 16 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Wed Dec 14 11:00:16 UTC 2016"); System.out.println("Wed Dec 14 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Mon Dec 12 11:00:16 UTC 2016"); System.out.println("Mon Dec 12 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Sat Dec 10 11:00:16 UTC 2016"); System.out.println("Sat Dec 10 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Thu Dec 8 11:00:16 UTC 2016"); System.out.println("Thu Dec 8 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Tue Dec 6 11:00:16 UTC 2016"); System.out.println("Tue Dec 6 11:00:16 UTC 2016"); System.out.println("Making a new Entry at Fri Dec 2 12:52:58 UTC 2016"); System.out.println("Fri Dec 2 12:52:58 UTC 2016"); } } //---------------------------------------------------- //Comment 
added on date:Fri Dec 2 09:45:31 UTC 2016 //Author: Andrew Woods, Apoorva Rao //Description: Adding comments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //---------------------------------------------------- //---------------------------------------------------- //Comment added on date:Fri May 19 11:00:00 UTC 2017 //Author: Andrew Woods, Apoorva Rao //Description: Adding comments for documentation //Project: JpetStore //Tools used: Jenkins, SonarQube, Rundeck //----------------------------------------------------
src/main/java/org/mybatis/jpetstore/domain/Calculate.java
package org.mybatis.jpetstore.domain;

import java.io.Serializable;
import java.math.BigDecimal;

public class Calculate implements Serializable {

  public void hello() {
    System.out.println("JPET Store Application");
    System.out.println("Class name: Calculate.java");
    System.out.println("Hello World");
    System.out.println("Making a new Entry at Thu May 18 11:00:00 UTC 2017");
    System.out.println("Thu May 18 11:00:00 UTC 2017");
    System.out.println("Making a new Entry at Fri Dec 2 12:52:58 UTC 2016");
    System.out.println("Fri Dec 2 12:52:58 UTC 2016");
  }
}
//----------------------------------------------------
//Comment added on date: Fri Dec 2 09:45:31 UTC 2016
//Author: Andrew Woods, Apoorva Rao
//Description: Adding comments for documentation
//Project: JpetStore
//Tools used: Jenkins, SonarQube, Rundeck
//----------------------------------------------------
Fri May 19 11:00:00 UTC 2017
src/main/java/org/mybatis/jpetstore/domain/Calculate.java
Fri May 19 11:00:00 UTC 2017
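The Calculate.hello() method in the record above hard-codes every timestamp as a string literal. As a minimal sketch of how such entries could be produced at run time instead, assuming java.time is acceptable in this codebase (the LogEntry class and printEntry method are hypothetical, not part of the original record):

package org.mybatis.jpetstore.domain;

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

// Hypothetical helper: emits one timestamped entry instead of hard-coding dates.
public class LogEntry {

  // Mirrors the "EEE MMM d HH:mm:ss UTC yyyy" shape of the strings above.
  private static final DateTimeFormatter FORMAT =
      DateTimeFormatter.ofPattern("EEE MMM d HH:mm:ss 'UTC' yyyy");

  public static void printEntry() {
    String now = ZonedDateTime.now(ZoneOffset.UTC).format(FORMAT);
    System.out.println("Making a new Entry at " + now);
    System.out.println(now);
  }
}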
Java
apache-2.0
0894ebbcc0dbb6771ac9f44b0f8952d7c21fbba3
0
hopshadoop/hops,smkniazi/hops,hopshadoop/hops,smkniazi/hops,hopshadoop/hops,hopshadoop/hops,hopshadoop/hops,smkniazi/hops,hopshadoop/hops,hopshadoop/hops,hopshadoop/hops,smkniazi/hops,smkniazi/hops,smkniazi/hops,smkniazi/hops,smkniazi/hops
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.TreeMap;

/**
 * DataBlockScanner manages block scanning for all the block pools. For each
 * block pool a {@link BlockPoolSliceScanner} is created which runs in a
 * separate thread to scan the blocks for that block pool. When a
 * {@link BPOfferService} becomes alive or dies, blockPoolScannerMap in this
 * class is updated.
 */
@InterfaceAudience.Private
public class DataBlockScanner implements Runnable {
  public static final Log LOG = LogFactory.getLog(DataBlockScanner.class);

  private final DataNode datanode;
  private final FsDatasetSpi<? extends FsVolumeSpi> dataset;
  private final Configuration conf;

  static final int SLEEP_PERIOD_MS = 5 * 1000;

  /**
   * Map to find the BlockPoolScanner for a given block pool id. This is
   * updated when a BPOfferService becomes alive or dies.
   */
  private final TreeMap<String, BlockPoolSliceScanner> blockPoolScannerMap =
      new TreeMap<>();
  Thread blockScannerThread = null;

  DataBlockScanner(DataNode datanode,
      FsDatasetSpi<? extends FsVolumeSpi> dataset, Configuration conf) {
    this.datanode = datanode;
    this.dataset = dataset;
    this.conf = conf;
  }

  @Override
  public void run() {
    String currentBpId = "";
    boolean firstRun = true;
    while (datanode.shouldRun && !Thread.interrupted()) {
      // Sleep everytime except in the first iteration.
      if (!firstRun) {
        try {
          Thread.sleep(SLEEP_PERIOD_MS);
        } catch (InterruptedException ex) {
          // Interrupt itself again to set the interrupt status
          blockScannerThread.interrupt();
          continue;
        }
      } else {
        firstRun = false;
      }

      BlockPoolSliceScanner bpScanner = getNextBPScanner(currentBpId);
      if (bpScanner == null) {
        // Possible if thread is interrupted
        continue;
      }
      currentBpId = bpScanner.getBlockPoolId();
      // If BPOfferService for this pool is not alive, don't process it
      if (!datanode.isBPServiceAlive(currentBpId)) {
        LOG.warn("Block Pool " + currentBpId + " is not alive");
        // Remove in case BP service died abruptly without proper shutdown
        removeBlockPool(currentBpId);
        continue;
      }
      bpScanner.scanBlockPoolSlice();
    }

    // Call shutdown for each allocated BlockPoolSliceScanner.
    for (BlockPoolSliceScanner bpss : blockPoolScannerMap.values()) {
      bpss.shutdown();
    }
  }

  // Wait for at least one block pool to be up
  private void waitForInit() {
    while ((getBlockPoolSetSize() < datanode.getAllBpOs().length) ||
        (getBlockPoolSetSize() < 1)) {
      try {
        Thread.sleep(SLEEP_PERIOD_MS);
      } catch (InterruptedException e) {
        blockScannerThread.interrupt();
        return;
      }
    }
  }

  /**
   * Find next block pool id to scan. There should be only one current
   * verification log file. Find which block pool contains the current
   * verification log file and that is used as the starting block pool id.
   * If no current files are found start with first block-pool in the
   * blockPoolSet. However, if more than one current files are found, the one
   * with latest modification time is used to find the next block pool id.
   */
  private BlockPoolSliceScanner getNextBPScanner(String currentBpId) {
    String nextBpId = null;
    while ((nextBpId == null) && datanode.shouldRun &&
        !blockScannerThread.isInterrupted()) {
      waitForInit();
      synchronized (this) {
        if (getBlockPoolSetSize() > 0) {
          // Find nextBpId by the minimum of the last scan time
          long lastScanTime = 0;
          for (String bpid : blockPoolScannerMap.keySet()) {
            final long t = getBPScanner(bpid).getLastScanTime();
            if (t != 0L) {
              if (bpid == null || t < lastScanTime) {
                lastScanTime = t;
                nextBpId = bpid;
              }
            }
          }

          // nextBpId can still be null if no current log is found,
          // find nextBpId sequentially.
          if (nextBpId == null) {
            nextBpId = blockPoolScannerMap.higherKey(currentBpId);
            if (nextBpId == null) {
              nextBpId = blockPoolScannerMap.firstKey();
            }
          }
          if (nextBpId != null) {
            return getBPScanner(nextBpId);
          }
        }
      }
      LOG.warn("No block pool is up, going to wait");
      try {
        Thread.sleep(5000);
      } catch (InterruptedException ex) {
        LOG.warn("Received exception: " + ex);
        blockScannerThread.interrupt();
        return null;
      }
    }
    return null;
  }

  private synchronized int getBlockPoolSetSize() {
    return blockPoolScannerMap.size();
  }

  @VisibleForTesting
  synchronized BlockPoolSliceScanner getBPScanner(String bpid) {
    return blockPoolScannerMap.get(bpid);
  }

  private synchronized String[] getBpIdList() {
    return blockPoolScannerMap.keySet()
        .toArray(new String[blockPoolScannerMap.keySet().size()]);
  }

  public void addBlock(ExtendedBlock block) {
    BlockPoolSliceScanner bpScanner = getBPScanner(block.getBlockPoolId());
    if (bpScanner != null) {
      bpScanner.addBlock(block);
    } else {
      LOG.warn("No block pool scanner found for block pool id: " +
          block.getBlockPoolId());
    }
  }

  boolean isInitialized(String bpid) {
    return getBPScanner(bpid) != null;
  }

  public synchronized void printBlockReport(StringBuilder buffer,
      boolean summary) {
    String[] bpIdList = getBpIdList();
    if (bpIdList == null || bpIdList.length == 0) {
      buffer.append("Periodic block scanner is not yet initialized. " +
          "Please check back again after some time.");
      return;
    }
    for (String bpid : bpIdList) {
      BlockPoolSliceScanner bpScanner = getBPScanner(bpid);
      buffer.append("\n\nBlock report for block pool: " + bpid + "\n");
      bpScanner.printBlockReport(buffer, summary);
      buffer.append("\n");
    }
  }

  public void deleteBlock(String poolId, Block toDelete) {
    BlockPoolSliceScanner bpScanner = getBPScanner(poolId);
    if (bpScanner != null) {
      bpScanner.deleteBlock(toDelete);
    } else {
      LOG.warn("No block pool scanner found for block pool id: " + poolId);
    }
  }

  public void deleteBlocks(String poolId, Block[] toDelete) {
    BlockPoolSliceScanner bpScanner = getBPScanner(poolId);
    if (bpScanner != null) {
      bpScanner.deleteBlocks(toDelete);
    } else {
      LOG.warn("No block pool scanner found for block pool id: " + poolId);
    }
  }

  public void shutdown() {
    synchronized (this) {
      if (blockScannerThread != null) {
        blockScannerThread.interrupt();
      }
    }

    // We cannot join within the synchronized block, because it would create a
    // deadlock situation. blockScannerThread calls other synchronized methods.
    if (blockScannerThread != null) {
      try {
        blockScannerThread.join();
      } catch (InterruptedException e) {
        // shutting down anyway
      }
    }
  }

  public synchronized void addBlockPool(String blockPoolId) {
    if (blockPoolScannerMap.get(blockPoolId) != null) {
      return;
    }
    BlockPoolSliceScanner bpScanner =
        new BlockPoolSliceScanner(blockPoolId, datanode, dataset, conf);
    blockPoolScannerMap.put(blockPoolId, bpScanner);
    LOG.info("Added bpid=" + blockPoolId +
        " to blockPoolScannerMap, new size=" + blockPoolScannerMap.size());
  }

  public synchronized void removeBlockPool(String blockPoolId) {
    BlockPoolSliceScanner bpss = blockPoolScannerMap.remove(blockPoolId);
    if (bpss != null) {
      bpss.shutdown();
    }
    LOG.info("Removed bpid=" + blockPoolId + " from blockPoolScannerMap");
  }

  @VisibleForTesting
  long getBlocksScannedInLastRun(String bpid) throws IOException {
    BlockPoolSliceScanner bpScanner = getBPScanner(bpid);
    if (bpScanner == null) {
      throw new IOException("Block Pool: " + bpid + " is not running");
    } else {
      return bpScanner.getBlocksScannedInLastRun();
    }
  }

  @VisibleForTesting
  long getTotalScans(String bpid) throws IOException {
    BlockPoolSliceScanner bpScanner = getBPScanner(bpid);
    if (bpScanner == null) {
      throw new IOException("Block Pool: " + bpid + " is not running");
    } else {
      return bpScanner.getTotalScans();
    }
  }

  public void start() {
    blockScannerThread = new Thread(this);
    blockScannerThread.setDaemon(true);
    blockScannerThread.start();
  }

  @InterfaceAudience.Private
  public static class Servlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException {
      response.setContentType("text/plain");

      DataNode datanode =
          (DataNode) getServletContext().getAttribute("datanode");
      DataBlockScanner blockScanner = datanode.blockScanner;

      boolean summary = (request.getParameter("listblocks") == null);

      StringBuilder buffer = new StringBuilder(8 * 1024);
      if (blockScanner == null) {
        LOG.warn("Periodic block scanner is not running");
        buffer.append("Periodic block scanner is not running. " +
            "Please check the datanode log if this is unexpected.");
      } else {
        blockScanner.printBlockReport(buffer, summary);
      }
      response.getWriter().write(buffer.toString()); // extra copy!
    }
  }
}
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.datanode;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.TreeMap;

/**
 * DataBlockScanner manages block scanning for all the block pools. For each
 * block pool a {@link BlockPoolSliceScanner} is created which runs in a
 * separate thread to scan the blocks for that block pool. When a
 * {@link BPOfferService} becomes alive or dies, blockPoolScannerMap in this
 * class is updated.
 */
@InterfaceAudience.Private
public class DataBlockScanner implements Runnable {
  public static final Log LOG = LogFactory.getLog(DataBlockScanner.class);

  private final DataNode datanode;
  private final FsDatasetSpi<? extends FsVolumeSpi> dataset;
  private final Configuration conf;

  static final int SLEEP_PERIOD_MS = 5 * 1000;

  /**
   * Map to find the BlockPoolScanner for a given block pool id. This is
   * updated when a BPOfferService becomes alive or dies.
   */
  private final TreeMap<String, BlockPoolSliceScanner> blockPoolScannerMap =
      new TreeMap<>();
  Thread blockScannerThread = null;

  DataBlockScanner(DataNode datanode,
      FsDatasetSpi<? extends FsVolumeSpi> dataset, Configuration conf) {
    this.datanode = datanode;
    this.dataset = dataset;
    this.conf = conf;
  }

  @Override
  public void run() {
    String currentBpId = "";
    boolean firstRun = true;
    while (datanode.shouldRun && !Thread.interrupted()) {
      // Sleep everytime except in the first iteration.
      if (!firstRun) {
        try {
          Thread.sleep(SLEEP_PERIOD_MS);
        } catch (InterruptedException ex) {
          // Interrupt itself again to set the interrupt status
          blockScannerThread.interrupt();
          continue;
        }
      } else {
        firstRun = false;
      }

      BlockPoolSliceScanner bpScanner = getNextBPScanner(currentBpId);
      if (bpScanner == null) {
        // Possible if thread is interrupted
        continue;
      }
      currentBpId = bpScanner.getBlockPoolId();
      // If BPOfferService for this pool is not alive, don't process it
      if (!datanode.isBPServiceAlive(currentBpId)) {
        LOG.warn("Block Pool " + currentBpId + " is not alive");
        // Remove in case BP service died abruptly without proper shutdown
        removeBlockPool(currentBpId);
        continue;
      }
      bpScanner.scanBlockPoolSlice();
    }

    // Call shutdown for each allocated BlockPoolSliceScanner.
    for (BlockPoolSliceScanner bpss : blockPoolScannerMap.values()) {
      bpss.shutdown();
    }
  }

  // Wait for at least one block pool to be up
  private void waitForInit() {
    while ((getBlockPoolSetSize() < datanode.getAllBpOs().length) ||
        (getBlockPoolSetSize() < 1)) {
      try {
        Thread.sleep(SLEEP_PERIOD_MS);
      } catch (InterruptedException e) {
        blockScannerThread.interrupt();
        return;
      }
    }
  }

  /**
   * Find next block pool id to scan. There should be only one current
   * verification log file. Find which block pool contains the current
   * verification log file and that is used as the starting block pool id.
   * If no current files are found start with first block-pool in the
   * blockPoolSet. However, if more than one current files are found, the one
   * with latest modification time is used to find the next block pool id.
   */
  private BlockPoolSliceScanner getNextBPScanner(String currentBpId) {
    String nextBpId = null;
    while ((nextBpId == null) && datanode.shouldRun &&
        !blockScannerThread.isInterrupted()) {
      waitForInit();
      synchronized (this) {
        if (getBlockPoolSetSize() > 0) {
          // Find nextBpId by the minimum of the last scan time
          long lastScanTime = 0;
          for (String bpid : blockPoolScannerMap.keySet()) {
            final long t = getBPScanner(bpid).getLastScanTime();
            if (t != 0L) {
              if (bpid == null || t < lastScanTime) {
                lastScanTime = t;
                nextBpId = bpid;
              }
            }
          }

          // nextBpId can still be null if no current log is found,
          // find nextBpId sequentially.
          if (nextBpId == null) {
            nextBpId = blockPoolScannerMap.higherKey(currentBpId);
            if (nextBpId == null) {
              nextBpId = blockPoolScannerMap.firstKey();
            }
          }
          if (nextBpId != null) {
            return getBPScanner(nextBpId);
          }
        }
      }
      LOG.warn("No block pool is up, going to wait");
      try {
        Thread.sleep(5000);
      } catch (InterruptedException ex) {
        LOG.warn("Received exception: " + ex);
        blockScannerThread.interrupt();
        return null;
      }
    }
    return null;
  }

  private synchronized int getBlockPoolSetSize() {
    return blockPoolScannerMap.size();
  }

  @VisibleForTesting
  synchronized BlockPoolSliceScanner getBPScanner(String bpid) {
    return blockPoolScannerMap.get(bpid);
  }

  private synchronized String[] getBpIdList() {
    return blockPoolScannerMap.keySet()
        .toArray(new String[blockPoolScannerMap.keySet().size()]);
  }

  public void addBlock(ExtendedBlock block) {
    BlockPoolSliceScanner bpScanner = getBPScanner(block.getBlockPoolId());
    if (bpScanner != null) {
      bpScanner.addBlock(block);
    } else {
      LOG.warn("No block pool scanner found for block pool id: " +
          block.getBlockPoolId());
    }
  }

  boolean isInitialized(String bpid) {
    return getBPScanner(bpid) != null;
  }

  public synchronized void printBlockReport(StringBuilder buffer,
      boolean summary) {
    String[] bpIdList = getBpIdList();
    if (bpIdList == null || bpIdList.length == 0) {
      buffer.append("Periodic block scanner is not yet initialized. " +
          "Please check back again after some time.");
      return;
    }
    for (String bpid : bpIdList) {
      BlockPoolSliceScanner bpScanner = getBPScanner(bpid);
      buffer.append("\n\nBlock report for block pool: " + bpid + "\n");
      bpScanner.printBlockReport(buffer, summary);
      buffer.append("\n");
    }
  }

  public void deleteBlock(String poolId, Block toDelete) {
    BlockPoolSliceScanner bpScanner = getBPScanner(poolId);
    if (bpScanner != null) {
      bpScanner.deleteBlock(toDelete);
    } else {
      LOG.warn("No block pool scanner found for block pool id: " + poolId);
    }
  }

  public void deleteBlocks(String poolId, Block[] toDelete) {
    BlockPoolSliceScanner bpScanner = getBPScanner(poolId);
    if (bpScanner != null) {
      bpScanner.deleteBlocks(toDelete);
    } else {
      LOG.warn("No block pool scanner found for block pool id: " + poolId);
    }
  }

  public void shutdown() {
    synchronized (this) {
      if (blockScannerThread != null) {
        blockScannerThread.interrupt();
      }
    }

    // We cannot join within the synchronized block, because it would create a
    // deadlock situation. blockScannerThread calls other synchronized methods.
    if (blockScannerThread != null) {
      try {
        blockScannerThread.join();
      } catch (InterruptedException e) {
        // shutting down anyway
      }
    }
  }

  public synchronized void addBlockPool(String blockPoolId) {
    if (blockPoolScannerMap.get(blockPoolId) != null) {
      return;
    }
    BlockPoolSliceScanner bpScanner =
        new BlockPoolSliceScanner(blockPoolId, datanode, dataset, conf);
    blockPoolScannerMap.put(blockPoolId, bpScanner);
    LOG.info("Added bpid=" + blockPoolId +
        " to blockPoolScannerMap, new size=" + blockPoolScannerMap.size());
  }

  public synchronized void removeBlockPool(String blockPoolId) {
    blockPoolScannerMap.remove(blockPoolId);
    LOG.info("Removed bpid=" + blockPoolId + " from blockPoolScannerMap");
  }

  @VisibleForTesting
  long getBlocksScannedInLastRun(String bpid) throws IOException {
    BlockPoolSliceScanner bpScanner = getBPScanner(bpid);
    if (bpScanner == null) {
      throw new IOException("Block Pool: " + bpid + " is not running");
    } else {
      return bpScanner.getBlocksScannedInLastRun();
    }
  }

  @VisibleForTesting
  long getTotalScans(String bpid) throws IOException {
    BlockPoolSliceScanner bpScanner = getBPScanner(bpid);
    if (bpScanner == null) {
      throw new IOException("Block Pool: " + bpid + " is not running");
    } else {
      return bpScanner.getTotalScans();
    }
  }

  public void start() {
    blockScannerThread = new Thread(this);
    blockScannerThread.setDaemon(true);
    blockScannerThread.start();
  }

  @InterfaceAudience.Private
  public static class Servlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException {
      response.setContentType("text/plain");

      DataNode datanode =
          (DataNode) getServletContext().getAttribute("datanode");
      DataBlockScanner blockScanner = datanode.blockScanner;

      boolean summary = (request.getParameter("listblocks") == null);

      StringBuilder buffer = new StringBuilder(8 * 1024);
      if (blockScanner == null) {
        LOG.warn("Periodic block scanner is not running");
        buffer.append("Periodic block scanner is not running. " +
            "Please check the datanode log if this is unexpected.");
      } else {
        blockScanner.printBlockReport(buffer, summary);
      }
      response.getWriter().write(buffer.toString()); // extra copy!
    }
  }
}
[HOPS-198] apply HDFS-4768
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
[HOPS-198] apply HDFS-4768
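The HOPS record above ports HDFS-4768 onto a class built around one data structure: a TreeMap from block-pool id to its BlockPoolSliceScanner, which getNextBPScanner() walks round-robin via higherKey() with a wrap-around to firstKey(). Below is a minimal, self-contained sketch of just that selection pattern; the class and method names (RoundRobinPools, addPool, removePool, nextPool) are hypothetical stand-ins, not Hadoop API.

import java.util.TreeMap;

public class RoundRobinPools {

  // Pool id -> scanner name; a TreeMap keeps ids sorted so rotation order is stable.
  private final TreeMap<String, String> pools = new TreeMap<>();

  synchronized void addPool(String poolId, String scannerName) {
    pools.putIfAbsent(poolId, scannerName);
  }

  synchronized void removePool(String poolId) {
    pools.remove(poolId);
  }

  // Returns the pool id after currentId, wrapping to the first key; null if empty.
  // This mirrors the higherKey()/firstKey() step in getNextBPScanner() above.
  synchronized String nextPool(String currentId) {
    if (pools.isEmpty()) {
      return null;
    }
    String next = pools.higherKey(currentId);
    return next != null ? next : pools.firstKey();
  }

  public static void main(String[] args) {
    RoundRobinPools registry = new RoundRobinPools();
    registry.addPool("BP-1", "scanner-1");
    registry.addPool("BP-2", "scanner-2");
    String current = "";
    for (int i = 0; i < 4; i++) {
      current = registry.nextPool(current);
      System.out.println(current); // prints BP-1, BP-2, BP-1, BP-2
    }
  }
}

A sorted map makes the wrap-around a single firstKey() call and keeps iteration deterministic, which is presumably why the scanner holds its pools in a TreeMap rather than a HashMap.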
Java
apache-2.0
fbde4337f1267283391384f21b90e34930b38118
0
aesteve/vertx-feeds,aesteve/vertx-feeds,aesteve/vertx-feeds,aesteve/vertx-feeds
package io.vertx.examples.feeds.verticles; import io.vertx.core.AbstractVerticle; import io.vertx.core.Context; import io.vertx.core.DeploymentOptions; import io.vertx.core.Future; import io.vertx.core.Handler; import io.vertx.core.Vertx; import io.vertx.core.json.JsonObject; import io.vertx.examples.feeds.utils.async.MultipleFutures; import java.util.ArrayList; import java.util.List; /** * Main verticle, orchestrates the instantiation of other verticles */ public class MainVerticle extends AbstractVerticle { public static final int REDIS_PORT = 8888; public static final int MONGO_PORT = 8889; private List<String> deploymentIds; @Override public void init(Vertx vertx, Context context) { super.init(vertx, context); deploymentIds = new ArrayList<String>(3); } @Override public void start(Future<Void> future) { deployEmbeddedDbs(future, this::deployFeedBroker); } private void deployEmbeddedDbs(Future<Void> future, Handler<Future<Void>> whatsNext) { MultipleFutures dbDeployments = new MultipleFutures(); dbDeployments.add(this::deployEmbeddedRedis); dbDeployments.add(this::deployEmbeddedMongo); dbDeployments.setHandler(result -> { if (result.failed()) { future.fail(result.cause()); } else { whatsNext.handle(future); } }); dbDeployments.start(); } private void deployEmbeddedRedis(Future<Void> future) { DeploymentOptions options = new DeploymentOptions(); options.setWorker(true); vertx.deployVerticle(EmbeddedRedis.class.getName(), options, result -> { if (result.failed()) { future.fail(result.cause()); } else { deploymentIds.add(result.result()); future.complete(); } }); } private void deployEmbeddedMongo(Future<Void> future) { DeploymentOptions options = new DeploymentOptions(); options.setWorker(true); vertx.deployVerticle(EmbeddedMongo.class.getName(), options, result -> { if (result.failed()) { future.fail(result.cause()); } else { deploymentIds.add(result.result()); future.complete(); } }); } private void deployFeedBroker(Future<Void> future) { JsonObject dbConfig = new JsonObject(); dbConfig.put("redis", redisConfig()); dbConfig.put("mongo", mongoConfig()); DeploymentOptions brokerOptions = new DeploymentOptions(); brokerOptions.setConfig(dbConfig); vertx.deployVerticle(FeedBroker.class.getName(), brokerOptions, brokerResult -> { if (brokerResult.failed()) { future.fail(brokerResult.cause()); } else { deploymentIds.add(brokerResult.result()); DeploymentOptions webserverOptions = new DeploymentOptions(); webserverOptions.setConfig(dbConfig); vertx.deployVerticle(WebServer.class.getName(), webserverOptions, serverResult -> { if (serverResult.failed()) { future.fail(serverResult.cause()); } else { deploymentIds.add(serverResult.result()); future.complete(); } }); } }); } @Override public void stop(Future<Void> future) { MultipleFutures futures = new MultipleFutures(future); deploymentIds.forEach(deploymentId -> { futures.add(fut -> { undeploy(deploymentId, fut); }); }); futures.start(); } private void undeploy(String deploymentId, Future<Void> future) { vertx.undeploy(deploymentId, res -> { if (res.succeeded()) { future.complete(); } else { future.fail(res.cause()); } }); } private static JsonObject mongoConfig() { JsonObject config = new JsonObject(); config.put("host", "localhost"); config.put("port", MONGO_PORT); config.put("db_name", "vertx-feeds"); return config; } private static JsonObject redisConfig() { JsonObject config = new JsonObject(); config.put("host", "localhost"); config.put("port", REDIS_PORT); return config; } }
src/main/java/io/vertx/examples/feeds/verticles/MainVerticle.java
package io.vertx.examples.feeds.verticles; import io.vertx.core.AbstractVerticle; import io.vertx.core.Context; import io.vertx.core.DeploymentOptions; import io.vertx.core.Future; import io.vertx.core.Handler; import io.vertx.core.Vertx; import io.vertx.core.json.JsonObject; import io.vertx.examples.feeds.utils.async.MultipleFutures; import java.util.ArrayList; import java.util.List; /** * Main verticle, orchestrates the instantiation of other verticles */ public class MainVerticle extends AbstractVerticle { public static final int REDIS_PORT = 8888; public static final int MONGO_PORT = 8889; private List<String> deploymentIds; @Override public void init(Vertx vertx, Context context) { super.init(vertx, context); deploymentIds = new ArrayList<String>(3); } @Override public void start(Future<Void> future) { deployEmbeddedDbs(future, this::deployFeedBroker); } private void deployEmbeddedDbs(Future<Void> future, Handler<Future<Void>> whatsNext) { MultipleFutures dbDeployments = new MultipleFutures(); dbDeployments.add(this::deployEmbeddedRedis); dbDeployments.add(this::deployEmbeddedMongo); dbDeployments.setHandler(result -> { if (result.failed()) { future.fail(result.cause()); } else { whatsNext.handle(future); } }); dbDeployments.start(); } private void deployEmbeddedRedis(Future<Void> future) { DeploymentOptions options = new DeploymentOptions(); options.setWorker(true); vertx.deployVerticle(EmbeddedRedis.class.getName(), options, result -> { if (result.failed()) { future.fail(result.cause()); } else { future.complete(); } }); } private void deployEmbeddedMongo(Future<Void> future) { DeploymentOptions options = new DeploymentOptions(); options.setWorker(true); vertx.deployVerticle(EmbeddedMongo.class.getName(), options, result -> { if (result.failed()) { future.fail(result.cause()); } else { future.complete(); } }); } private void deployFeedBroker(Future<Void> future) { JsonObject dbConfig = new JsonObject(); dbConfig.put("redis", redisConfig()); dbConfig.put("mongo", mongoConfig()); DeploymentOptions brokerOptions = new DeploymentOptions(); brokerOptions.setConfig(dbConfig); vertx.deployVerticle(FeedBroker.class.getName(), brokerOptions, brokerResult -> { if (brokerResult.failed()) { future.fail(brokerResult.cause()); } else { deploymentIds.add(brokerResult.result()); DeploymentOptions webserverOptions = new DeploymentOptions(); webserverOptions.setConfig(dbConfig); vertx.deployVerticle(WebServer.class.getName(), webserverOptions, serverResult -> { if (serverResult.failed()) { future.fail(serverResult.cause()); } else { deploymentIds.add(serverResult.result()); future.complete(); } }); } }); } @Override public void stop(Future<Void> future) { MultipleFutures futures = new MultipleFutures(future); deploymentIds.forEach(deploymentId -> { futures.add(fut -> { undeploy(deploymentId, fut); }); }); futures.start(); } private void undeploy(String deploymentId, Future<Void> future) { vertx.undeploy(deploymentId, res -> { if (res.succeeded()) { future.complete(); } else { future.fail(res.cause()); } }); } private static JsonObject mongoConfig() { JsonObject config = new JsonObject(); config.put("host", "localhost"); config.put("port", MONGO_PORT); config.put("db_name", "vertx-feeds"); return config; } private static JsonObject redisConfig() { JsonObject config = new JsonObject(); config.put("host", "localhost"); config.put("port", REDIS_PORT); return config; } }
Fixes #4
src/main/java/io/vertx/examples/feeds/verticles/MainVerticle.java
Fixes #4
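The vertx-feeds diff above is a one-pattern fix: the old MainVerticle deployed the embedded Redis and Mongo verticles without recording their deployment ids, so the stop() hook could never undeploy them; the new version adds deploymentIds.add(result.result()) in both completion handlers. The sketch below isolates that bookkeeping under the same Vert.x 3 callback API; TrackedDeployerSketch and deployTracked are hypothetical names, not part of the project.

import io.vertx.core.AbstractVerticle;
import io.vertx.core.DeploymentOptions;
import io.vertx.core.Future;
import java.util.ArrayList;
import java.util.List;

public class TrackedDeployerSketch extends AbstractVerticle {

  // Every successful deployment records its id here so stop() can undo it.
  private final List<String> deploymentIds = new ArrayList<>();

  private void deployTracked(String verticleName, Future<Void> future) {
    DeploymentOptions options = new DeploymentOptions().setWorker(true);
    vertx.deployVerticle(verticleName, options, result -> {
      if (result.failed()) {
        future.fail(result.cause());
      } else {
        deploymentIds.add(result.result()); // the line old_contents was missing
        future.complete();
      }
    });
  }

  @Override
  public void stop(Future<Void> stopFuture) {
    // With the ids recorded, the embedded databases are undeployed like everything else.
    deploymentIds.forEach(id -> vertx.undeploy(id));
    stopFuture.complete();
  }
}

Without the recorded ids, the worker verticles wrapping the embedded databases would simply survive stop(); tracking them is the entire substance of this commit's diff.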
Java
apache-2.0
1537bdbd16c0a52aa21a0b3c5d48b7773e3680fa
0
MER-GROUP/intellij-community,supersven/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,samthor/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,dslomov/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,nicolargo/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,da1z/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,holmes/intellij-community,allotria/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,hurricup/intellij-community,kool79/intellij-community,xfournet/intellij-community,allotria/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,samthor/intellij-community,fnouama/intellij-community,signed/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,samthor/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,signed/intellij-community,retomerz/intellij-community,semonte/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,robovm/robovm-studio,asedunov/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,holmes/intellij-community,supersven/intellij-community,fitermay/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,diorcety/intellij-community,caot/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/int
ellij-community,ibinti/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/intellij-community,dslomov/intellij-community,ibinti/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,slisson/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,semonte/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,izonder/intellij-community,xfournet/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,mglukhikh/intellij-community,kdwink/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,da1z/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,signed/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,da1z/intellij-community,izonder/intellij-community,samthor/intellij-community,adedayo/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,supersven/intellij-community,samthor/intellij-community,FHannes/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,ryano144/intellij-community,gnuhub/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,holmes/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,kool79/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,da1z/intellij-community,slisson/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/int
ellij-community,TangHao1987/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,apixandru/intellij-community,asedunov/intellij-community,samthor/intellij-community,apixandru/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,holmes/intellij-community,xfournet/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,retomerz/intellij-community,adedayo/intellij-community,supersven/intellij-community,retomerz/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,suncycheng/intellij-community,kool79/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,semonte/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,hurricup/intellij-community,izonder/intellij-community,FHannes/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,izonder/intellij-community,ryano144/intellij-community,slisson/intellij-community,ibinti/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,allotria/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,caot/intellij-community,fnouama/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,retomerz/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,fnouama/intellij-community,amith01994/intellij-community,blademainer/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,Lekanich/intellij-co
mmunity,ivan-fedorov/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,caot/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,semonte/intellij-community,samthor/intellij-community,akosyakov/intellij-community,slisson/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,xfournet/intellij-community,da1z/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,izonder/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,blademainer/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,semonte/intellij-community,vladmm/intellij-community,allotria/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,holmes/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,izonder/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,samthor/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,slisson/intellij-community,fnouama/intellij-community,ryano144/intellij-community,allotria/intellij-community,signed/intellij-community,FHannes/intellij-community,asedunov/intellij-community,ryano144/intellij-community,jagguli/intellij-community,da1z/intellij-community,kool79/intellij-community,adedayo/intellij-community,caot/intellij-community,vladmm/intellij-community,izonder/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,caot/intellij-co
mmunity,muntasirsyed/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,dslomov/intellij-community,apixandru/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,ryano144/intellij-community,semonte/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,kdwink/intellij-community,caot/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,supersven/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,kool79/intellij-community,dslomov/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,da1z/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,signed/intellij-community,xfournet/intellij-community,petteyg/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,hurricup/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,signed/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,petteyg/intellij-community,allotria/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,salguarnieri/intellij-community,asedunov/intellij-community,apixandru/intellij-community,allotria/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,blademainer/intellij-community,caot/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,orekyuu/intellij-community,kool79/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,slisson/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,amith01994/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,tmpgit/int
ellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,adedayo/intellij-community,kool79/intellij-community,signed/intellij-community,semonte/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,slisson/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,holmes/intellij-community,Distrotech/intellij-community,supersven/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,ryano144/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,supersven/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,apixandru/intellij-community,dslomov/intellij-community,holmes/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,fitermay/intellij-community,holmes/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,kool79/intellij-community,apixandru/intellij-community,semonte/intellij-community,fitermay/intellij-community,diorcety/intellij-community,caot/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,signed/intellij-community,asedunov/intellij-community,caot/intellij-community,clumsy/intellij-community,caot/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,retomerz/intellij-community,jagguli/intellij-community,fitermay/intellij-community,supersven/intellij-community,supersven/intellij-community,slisson/intellij-community
package com.jetbrains.python.run; import com.google.common.collect.Lists; import com.intellij.execution.ExecutionException; import com.intellij.execution.RunContentExecutor; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.execution.configurations.ParamsGroup; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.process.ProcessTerminatedListener; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.NotNullFunction; import com.jetbrains.django.util.OSUtil; import com.jetbrains.python.buildout.BuildoutFacet; import com.jetbrains.python.sdk.PythonEnvUtil; import com.jetbrains.python.sdk.PythonSdkType; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Base class for tasks which are run from PyCharm with results displayed in a toolwindow (manage.py, setup.py, Sphinx etc). * * @author yole */ public class PythonTask { protected final Module myModule; private final Sdk mySdk; private String myWorkingDirectory; private String myRunnerScript; private List<String> myParameters = new ArrayList<String>(); private final String myRunTabTitle; private String myHelpId; private Runnable myAfterCompletion; public PythonTask(Module module, String runTabTitle) throws ExecutionException { myModule = module; myRunTabTitle = runTabTitle; mySdk = PythonSdkType.findPythonSdk(module); if (mySdk == null) { throw new ExecutionException("Cannot find Python interpreter for selected module"); } } public String getWorkingDirectory() { return myWorkingDirectory; } public void setWorkingDirectory(String workingDirectory) { myWorkingDirectory = workingDirectory; } public void setRunnerScript(String script) { myRunnerScript = script; } public void setParameters(List<String> parameters) { myParameters = parameters; } public void setHelpId(String helpId) { myHelpId = helpId; } public void setAfterCompletion(Runnable afterCompletion) { myAfterCompletion = afterCompletion; } public ProcessHandler createProcess() throws ExecutionException { GeneralCommandLine commandLine = createCommandLine(); ProcessHandler handler = PythonProcessRunner.createProcessHandlingCtrlC(commandLine); ProcessTerminatedListener.attach(handler); return handler; } public GeneralCommandLine createCommandLine() { GeneralCommandLine cmd = new GeneralCommandLine(); if (myWorkingDirectory != null) { cmd.setWorkDirectory(myWorkingDirectory); } String homePath = mySdk.getHomePath(); if (homePath != null) { homePath = FileUtil.toSystemDependentName(homePath); } PythonCommandLineState.createStandardGroupsIn(cmd); ParamsGroup scriptParams = cmd.getParametersList().getParamsGroup(PythonCommandLineState.GROUP_SCRIPT); assert scriptParams != null; cmd.setPassParentEnvs(true); Map<String, String> envs = new HashMap<String, String>(); if (!SystemInfo.isWindows) { cmd.setExePath("bash"); ParamsGroup bashParams = cmd.getParametersList().addParamsGroupAt(0, "Bash"); bashParams.addParameter("-cl"); NotNullFunction<String, String> escaperFunction = StringUtil.escaper(false, "|>$\"'& "); StringBuilder paramString = new StringBuilder(escaperFunction.fun(homePath) + " " +escaperFunction.fun(myRunnerScript)); for (String p : myParameters) { paramString.append(" ").append(p); } 
bashParams.addParameter(paramString.toString()); } else { final String PATH_KEY = OSUtil.getPATHenvVariableName(); String sysPath = System.getenv().get(PATH_KEY); if (!StringUtil.isEmpty(sysPath)) { final String path = envs.get(PATH_KEY); envs.put(PATH_KEY, OSUtil.appendToPATHenvVariable(path, sysPath)); } cmd.setExePath(homePath); scriptParams.addParameter(myRunnerScript); scriptParams.addParameters(myParameters); } PythonEnvUtil.setPythonUnbuffered(envs); cmd.setEnvParams(envs); List<String> pythonPath = setupPythonPath(); PythonCommandLineState.initPythonPath(cmd, true, pythonPath, homePath); PythonSdkType.patchCommandLineForVirtualenv(cmd, homePath, true); BuildoutFacet facet = BuildoutFacet.getInstance(myModule); if (facet != null) { facet.patchCommandLineForBuildout(cmd); } return cmd; } protected List<String> setupPythonPath() { List<String> pythonPath = Lists.newArrayList(PythonCommandLineState.getAddedPaths(mySdk)); pythonPath.addAll(PythonCommandLineState.collectPythonPath(myModule)); return pythonPath; } public void run() throws ExecutionException { final ProcessHandler process = createProcess(); final Project project = myModule.getProject(); new RunContentExecutor(project, process) .withFilter(new PythonTracebackFilter(project)) .withTitle(myRunTabTitle) .withRerun(new Runnable() { @Override public void run() { try { PythonTask.this.run(); } catch (ExecutionException e) { Messages.showErrorDialog(e.getMessage(), myRunTabTitle); } } }) .withAfterCompletion(myAfterCompletion) .withHelpId(myHelpId) .run(); } }
python/src/com/jetbrains/python/run/PythonTask.java
package com.jetbrains.python.run; import com.google.common.collect.Lists; import com.intellij.execution.ExecutionException; import com.intellij.execution.RunContentExecutor; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.execution.configurations.ParamsGroup; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.process.ProcessTerminatedListener; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.NotNullFunction; import com.jetbrains.django.util.OSUtil; import com.jetbrains.python.buildout.BuildoutFacet; import com.jetbrains.python.sdk.PythonEnvUtil; import com.jetbrains.python.sdk.PythonSdkType; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Base class for tasks which are run from PyCharm with results displayed in a toolwindow (manage.py, setup.py, Sphinx etc). * * @author yole */ public class PythonTask { protected final Module myModule; private final Sdk mySdk; private String myWorkingDirectory; private String myRunnerScript; private List<String> myParameters = new ArrayList<String>(); private final String myRunTabTitle; private String myHelpId; private Runnable myAfterCompletion; public PythonTask(Module module, String runTabTitle) throws ExecutionException { myModule = module; myRunTabTitle = runTabTitle; mySdk = PythonSdkType.findPythonSdk(module); if (mySdk == null) { throw new ExecutionException("Cannot find Python interpreter for selected module"); } } public String getWorkingDirectory() { return myWorkingDirectory; } public void setWorkingDirectory(String workingDirectory) { myWorkingDirectory = workingDirectory; } public void setRunnerScript(String script) { myRunnerScript = script; } public void setParameters(List<String> parameters) { myParameters = parameters; } public void setHelpId(String helpId) { myHelpId = helpId; } public void setAfterCompletion(Runnable afterCompletion) { myAfterCompletion = afterCompletion; } public ProcessHandler createProcess() throws ExecutionException { GeneralCommandLine commandLine = createCommandLine(); ProcessHandler handler = PythonProcessRunner.createProcessHandlingCtrlC(commandLine); ProcessTerminatedListener.attach(handler); return handler; } public GeneralCommandLine createCommandLine() { GeneralCommandLine cmd = new GeneralCommandLine(); if (myWorkingDirectory != null) { cmd.setWorkDirectory(myWorkingDirectory); } String homePath = mySdk.getHomePath(); if (homePath != null) { homePath = FileUtil.toSystemDependentName(homePath); } PythonCommandLineState.createStandardGroupsIn(cmd); ParamsGroup scriptParams = cmd.getParametersList().getParamsGroup(PythonCommandLineState.GROUP_SCRIPT); assert scriptParams != null; cmd.setPassParentEnvs(true); Map<String, String> envs = new HashMap<String, String>(); if (!SystemInfo.isWindows) { cmd.setExePath("bash"); ParamsGroup bashParams = cmd.getParametersList().addParamsGroupAt(0, "Bash"); bashParams.addParameter("-cl"); NotNullFunction<String, String> escaperFunction = StringUtil.escaper(false, "|>$\"'& "); StringBuilder paramString = new StringBuilder(escaperFunction.fun(homePath) + " " +escaperFunction.fun(myRunnerScript)); for (String p : myParameters) { paramString.append(" ").append(p); } 
bashParams.addParameter(paramString.toString()); } else { final String PATH_KEY = OSUtil.getPATHenvVariableName(); String sysPath = System.getenv().get(PATH_KEY); if (!StringUtil.isEmpty(sysPath)) { final String path = envs.get(PATH_KEY); envs.put(PATH_KEY, OSUtil.appendToPATHenvVariable(path, sysPath)); } cmd.setExePath(homePath); scriptParams.addParameter(myRunnerScript); scriptParams.addParameters(myParameters); } PythonEnvUtil.setPythonUnbuffered(envs); cmd.setEnvParams(envs); List<String> pythonPath = setupPythonPath(); PythonCommandLineState.initPythonPath(cmd, true, pythonPath, homePath); PythonSdkType.patchCommandLineForVirtualenv(cmd, homePath, false); BuildoutFacet facet = BuildoutFacet.getInstance(myModule); if (facet != null) { facet.patchCommandLineForBuildout(cmd); } return cmd; } protected List<String> setupPythonPath() { List<String> pythonPath = Lists.newArrayList(PythonCommandLineState.getAddedPaths(mySdk)); pythonPath.addAll(PythonCommandLineState.collectPythonPath(myModule)); return pythonPath; } public void run() throws ExecutionException { final ProcessHandler process = createProcess(); final Project project = myModule.getProject(); new RunContentExecutor(project, process) .withFilter(new PythonTracebackFilter(project)) .withTitle(myRunTabTitle) .withRerun(new Runnable() { @Override public void run() { try { PythonTask.this.run(); } catch (ExecutionException e) { Messages.showErrorDialog(e.getMessage(), myRunTabTitle); } } }) .withAfterCompletion(myAfterCompletion) .withHelpId(myHelpId) .run(); } }
inherit parent PATH environment variable when running tasks (PY-6264)
python/src/com/jetbrains/python/run/PythonTask.java
inherit parent PATH environment variable when running tasks (PY-6264)
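The PY-6264 change is a single-flag diff: patchCommandLineForVirtualenv(cmd, homePath, ...) goes from false to true, which per the commit message makes the spawned task inherit the parent process's PATH (presumably the flag controls whether the parent environment is merged in; the IntelliJ API itself is not reproduced here). As a plain-JDK illustration of that inherit-and-extend idea, the sketch below starts from the PATH that ProcessBuilder already copied from the parent and prepends a hypothetical virtualenv bin directory instead of overwriting the variable.

import java.io.File;
import java.io.IOException;
import java.util.Map;

public class InheritPathSketch {
  public static void main(String[] args) throws IOException {
    ProcessBuilder pb = new ProcessBuilder("python", "--version");
    // environment() is pre-populated from the parent process, so PATH is inherited.
    Map<String, String> env = pb.environment();
    String pathKey = System.getProperty("os.name").startsWith("Windows") ? "Path" : "PATH";
    String parentPath = env.getOrDefault(pathKey, "");
    String venvBin = "/opt/venv/bin"; // hypothetical virtualenv bin directory
    // Extend rather than replace: the interpreter dir wins, but parent entries remain.
    env.put(pathKey, venvBin + File.pathSeparator + parentPath);
    pb.inheritIO().start(); // assumes a python binary is reachable on the final PATH
  }
}

Replacing PATH outright is what breaks tasks that shell out to tools installed on the user's machine; merging, as above, keeps both the virtualenv's interpreter and the inherited entries visible, which matches the OSUtil.appendToPATHenvVariable merge already present in the Windows branch of createCommandLine().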
Java
apache-2.0
94b397c9ad4da52e81e2dbe506f739078597f2d4
0
intentionet/batfish,arifogel/batfish,intentionet/batfish,intentionet/batfish,batfish/batfish,intentionet/batfish,batfish/batfish,intentionet/batfish,dhalperi/batfish,dhalperi/batfish,arifogel/batfish,batfish/batfish,arifogel/batfish,dhalperi/batfish
package org.batfish.main; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTreeWalker; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.batfish.collections.EdgeSet; import org.batfish.collections.FibMap; import org.batfish.collections.FibRow; import org.batfish.collections.FibSet; import org.batfish.collections.FlowSinkInterface; import org.batfish.collections.FlowSinkSet; import org.batfish.collections.FunctionSet; import org.batfish.collections.MultiSet; import org.batfish.collections.NodeInterfacePair; import org.batfish.collections.NodeRoleMap; import org.batfish.collections.NodeSet; import org.batfish.collections.PolicyRouteFibIpMap; import org.batfish.collections.PolicyRouteFibNodeMap; import org.batfish.collections.PredicateSemantics; import org.batfish.collections.PredicateValueTypeMap; import org.batfish.collections.QualifiedNameMap; import org.batfish.collections.RoleNodeMap; import org.batfish.collections.RoleSet; import org.batfish.collections.TreeMultiSet; import org.batfish.grammar.BatfishCombinedParser; import org.batfish.grammar.ControlPlaneExtractor; import org.batfish.grammar.ParseTreePrettyPrinter; import org.batfish.grammar.cisco.CiscoCombinedParser; import org.batfish.grammar.cisco.CiscoControlPlaneExtractor; import org.batfish.grammar.flatjuniper.FlatJuniperCombinedParser; import org.batfish.grammar.flatjuniper.FlatJuniperControlPlaneExtractor; import org.batfish.grammar.juniper.JuniperCombinedParser; import org.batfish.grammar.juniper.JuniperFlattener; import org.batfish.grammar.logicblox.LogQLPredicateInfoExtractor; import org.batfish.grammar.logicblox.LogiQLCombinedParser; import org.batfish.grammar.logicblox.LogiQLPredicateInfoResolver; import org.batfish.grammar.topology.BatfishTopologyCombinedParser; import org.batfish.grammar.topology.BatfishTopologyExtractor; import org.batfish.grammar.topology.GNS3TopologyCombinedParser; import org.batfish.grammar.topology.GNS3TopologyExtractor; import org.batfish.grammar.topology.RoleCombinedParser; import org.batfish.grammar.topology.RoleExtractor; import org.batfish.grammar.topology.TopologyExtractor; import org.batfish.grammar.z3.ConcretizerQueryResultCombinedParser; import org.batfish.grammar.z3.ConcretizerQueryResultExtractor; import org.batfish.grammar.z3.DatalogQueryResultCombinedParser; import org.batfish.grammar.z3.DatalogQueryResultExtractor; import org.batfish.logic.LogicResourceLocator; import org.batfish.logicblox.ConfigurationFactExtractor; import org.batfish.logicblox.Facts; 
import org.batfish.logicblox.LBInitializationException; import org.batfish.logicblox.LBValueType; import org.batfish.logicblox.LogicBloxFrontend; import org.batfish.logicblox.PredicateInfo; import org.batfish.logicblox.ProjectFile; import org.batfish.logicblox.QueryException; import org.batfish.logicblox.TopologyFactExtractor; import org.batfish.representation.BgpNeighbor; import org.batfish.representation.BgpProcess; import org.batfish.representation.Configuration; import org.batfish.representation.Edge; import org.batfish.representation.Interface; import org.batfish.representation.Ip; import org.batfish.representation.IpProtocol; import org.batfish.representation.LineAction; import org.batfish.representation.OspfArea; import org.batfish.representation.OspfProcess; import org.batfish.representation.PolicyMap; import org.batfish.representation.PolicyMapAction; import org.batfish.representation.PolicyMapClause; import org.batfish.representation.PolicyMapMatchRouteFilterListLine; import org.batfish.representation.Prefix; import org.batfish.representation.RouteFilterLine; import org.batfish.representation.RouteFilterList; import org.batfish.representation.Topology; import org.batfish.representation.VendorConfiguration; import org.batfish.representation.cisco.CiscoVendorConfiguration; import org.batfish.util.StringFilter; import org.batfish.util.SubRange; import org.batfish.util.UrlZipExplorer; import org.batfish.util.Util; import org.batfish.z3.ConcretizerQuery; import org.batfish.z3.FailureInconsistencyBlackHoleQuerySynthesizer; import org.batfish.z3.MultipathInconsistencyQuerySynthesizer; import org.batfish.z3.QuerySynthesizer; import org.batfish.z3.ReachableQuerySynthesizer; import org.batfish.z3.RoleReachabilityQuerySynthesizer; import org.batfish.z3.RoleTransitQuerySynthesizer; import org.batfish.z3.Synthesizer; import com.logicblox.bloxweb.client.ServiceClientException; import com.logicblox.connect.Workspace.Relation; import com.thoughtworks.xstream.XStream; import com.thoughtworks.xstream.io.xml.DomDriver; /** * This class encapsulates the main control logic for Batfish. 
*/ public class Batfish implements AutoCloseable { /** * Name of the LogiQL executable block containing basic facts that are true * for any network */ private static final String BASIC_FACTS_BLOCKNAME = "BaseFacts"; /** * Name of the file in which the topology of a network is serialized */ private static final String EDGES_FILENAME = "edges"; /** * Name of the LogiQL data-plane predicate containing next hop information * for policy-routing */ private static final String FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME = "FibForwardPolicyRouteNextHopIp"; /** * Name of the LogiQL data-plane predicate containing next hop information * for destination-based routing */ private static final String FIB_PREDICATE_NAME = "FibNetwork"; /** * Name of the file in which the destination-routing FIBs are serialized */ private static final String FIBS_FILENAME = "fibs"; /** * Name of the file in which the policy-routing FIBs are serialized */ private static final String FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME = "fibs-policy-route"; /** * Name of the LogiQL predicate containing flow-sink interface tags */ private static final String FLOW_SINK_PREDICATE_NAME = "FlowSinkInterface"; /** * Name of the file in which derived flow-sink interface tags are serialized */ private static final String FLOW_SINKS_FILENAME = "flow-sinks"; private static final String GEN_OSPF_STARTING_IP = "10.0.0.0"; /** * A byte-array containing the first 4 bytes comprising the header for a file * that is the output of java serialization */ private static final byte[] JAVA_SERIALIZED_OBJECT_HEADER = { (byte) 0xac, (byte) 0xed, (byte) 0x00, (byte) 0x05 }; /** * The name of the LogiQL library for org.batfish */ private static final String LB_BATFISH_LIBRARY_NAME = "libbatfish"; /** * The name of the file in which LogiQL predicate type-information and * documentation is serialized */ private static final String PREDICATE_INFO_FILENAME = "predicateInfo.object"; /** * A string containing the system-specific path separator character */ private static final String SEPARATOR = System.getProperty("file.separator"); /** * Role name for generated stubs */ private static final String STUB_ROLE = "generated_stubs"; /** * The name of the [optional] topology file within a test-rig */ private static final String TOPOLOGY_FILENAME = "topology.net"; /** * The name of the LogiQL predicate containing pairs of interfaces in the * same LAN segment */ private static final String TOPOLOGY_PREDICATE_NAME = "LanAdjacent"; private static void initControlPlaneFactBins( Map<String, StringBuilder> factBins) { initFactBins(Facts.CONTROL_PLANE_FACT_COLUMN_HEADERS, factBins); } private static void initFactBins(Map<String, String> columnHeaderMap, Map<String, StringBuilder> factBins) { for (String factPredicate : columnHeaderMap.keySet()) { String columnHeaders = columnHeaderMap.get(factPredicate); String initialText = columnHeaders + "\n"; factBins.put(factPredicate, new StringBuilder(initialText)); } } private static void initTrafficFactBins(Map<String, StringBuilder> factBins) { initFactBins(Facts.TRAFFIC_FACT_COLUMN_HEADERS, factBins); } private List<LogicBloxFrontend> _lbFrontends; private BatfishLogger _logger; private PredicateInfo _predicateInfo; private Settings _settings; private long _timerCount; private File _tmpLogicDir; public Batfish(Settings settings) { _settings = settings; _logger = _settings.getLogger(); _lbFrontends = new ArrayList<LogicBloxFrontend>(); _tmpLogicDir = null; } private void addProject(LogicBloxFrontend lbFrontend) { _logger.info("\n*** ADDING 
PROJECT ***\n"); resetTimer(); String settingsLogicDir = _settings.getLogicDir(); File logicDir; if (settingsLogicDir != null) { logicDir = new ProjectFile(settingsLogicDir); } else { logicDir = retrieveLogicDir().getAbsoluteFile(); } String result = lbFrontend.addProject(logicDir, ""); cleanupLogicDir(); if (result != null) { throw new BatfishException(result + "\n"); } _logger.info("SUCCESS\n"); printElapsedTime(); } private void addStaticFacts(LogicBloxFrontend lbFrontend, String blockName) { _logger.info("\n*** ADDING STATIC FACTS ***\n"); resetTimer(); _logger.info("Adding " + blockName + "...."); String output = lbFrontend.execNamedBlock(LB_BATFISH_LIBRARY_NAME + ":" + blockName); if (output == null) { _logger.info("OK\n"); } else { throw new BatfishException(output + "\n"); } _logger.info("SUCCESS\n"); printElapsedTime(); } private void anonymizeConfigurations() { // TODO Auto-generated method stub } /** * This function extracts predicate type information from the logic files. It * is meant only to be called during the build process, and should never be * executed from a jar */ private void buildPredicateInfo() { Path logicBinDirPath = null; URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain() .getCodeSource().getLocation(); String logicSourceString = logicSourceURL.toString(); if (logicSourceString.startsWith("onejar:")) { throw new BatfishException( "buildPredicateInfo() should never be called from within a jar"); } String logicPackageResourceName = LogicResourceLocator.class.getPackage() .getName().replace('.', SEPARATOR.charAt(0)); try { logicBinDirPath = Paths.get(LogicResourceLocator.class .getClassLoader().getResource(logicPackageResourceName).toURI()); } catch (URISyntaxException e) { throw new BatfishException("Failed to resolve logic output directory", e); } Path logicSrcDirPath = Paths.get(_settings.getLogicSrcDir()); final Set<Path> logicFiles = new TreeSet<Path>(); try { Files.walkFileTree(logicSrcDirPath, new java.nio.file.SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String name = file.getFileName().toString(); if (!name.equals("BaseFacts.logic") && !name.endsWith("_rules.logic") && !name.startsWith("service_") && name.endsWith(".logic")) { logicFiles.add(file); } return super.visitFile(file, attrs); } }); } catch (IOException e) { throw new BatfishException("Could not make list of logic files", e); } PredicateValueTypeMap predicateValueTypes = new PredicateValueTypeMap(); QualifiedNameMap qualifiedNameMap = new QualifiedNameMap(); FunctionSet functions = new FunctionSet(); PredicateSemantics predicateSemantics = new PredicateSemantics(); List<ParserRuleContext> trees = new ArrayList<ParserRuleContext>(); for (Path logicFilePath : logicFiles) { String input = readFile(logicFilePath.toFile()); LogiQLCombinedParser parser = new LogiQLCombinedParser(input, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext tree = parse(parser, logicFilePath.toString()); trees.add(tree); } ParseTreeWalker walker = new ParseTreeWalker(); for (ParserRuleContext tree : trees) { LogQLPredicateInfoExtractor extractor = new LogQLPredicateInfoExtractor( predicateValueTypes); walker.walk(extractor, tree); } for (ParserRuleContext tree : trees) { LogiQLPredicateInfoResolver resolver = new LogiQLPredicateInfoResolver( predicateValueTypes, qualifiedNameMap, functions, predicateSemantics); walker.walk(resolver, tree); } PredicateInfo predicateInfo = new 
PredicateInfo(predicateSemantics, predicateValueTypes, functions, qualifiedNameMap); File predicateInfoFile = logicBinDirPath.resolve(PREDICATE_INFO_FILENAME) .toFile(); serializeObject(predicateInfo, predicateInfoFile); } private void cleanupLogicDir() { if (_tmpLogicDir != null) { try { FileUtils.deleteDirectory(_tmpLogicDir); } catch (IOException e) { throw new BatfishException( "Error cleaning up temporary logic directory", e); } _tmpLogicDir = null; } } @Override public void close() throws Exception { for (LogicBloxFrontend lbFrontend : _lbFrontends) { // Close backend threads if (lbFrontend != null && lbFrontend.connected()) { lbFrontend.close(); } } } private void computeDataPlane(LogicBloxFrontend lbFrontend) { _logger.info("\n*** COMPUTING DATA PLANE STRUCTURES ***\n"); resetTimer(); lbFrontend.initEntityTable(); _logger.info("Retrieving flow sink information from LogicBlox..."); FlowSinkSet flowSinks = getFlowSinkSet(lbFrontend); _logger.info("OK\n"); _logger.info("Retrieving topology information from LogicBlox..."); EdgeSet topologyEdges = getTopologyEdges(lbFrontend); _logger.info("OK\n"); String fibQualifiedName = _predicateInfo.getPredicateNames().get( FIB_PREDICATE_NAME); _logger .info("Retrieving destination-routing FIB information from LogicBlox..."); Relation fibNetwork = lbFrontend.queryPredicate(fibQualifiedName); _logger.info("OK\n"); String fibPolicyRouteNextHopQualifiedName = _predicateInfo .getPredicateNames().get(FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME); _logger .info("Retrieving policy-routing FIB information from LogicBlox..."); Relation fibPolicyRouteNextHops = lbFrontend .queryPredicate(fibPolicyRouteNextHopQualifiedName); _logger.info("OK\n"); _logger.info("Calculating forwarding rules..."); FibMap fibs = getRouteForwardingRules(fibNetwork, lbFrontend); PolicyRouteFibNodeMap policyRouteFibNodeMap = getPolicyRouteFibNodeMap( fibPolicyRouteNextHops, lbFrontend); _logger.info("OK\n"); Path flowSinksPath = Paths.get(_settings.getDataPlaneDir(), FLOW_SINKS_FILENAME); Path fibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_FILENAME); Path fibsPolicyRoutePath = Paths.get(_settings.getDataPlaneDir(), FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME); Path edgesPath = Paths.get(_settings.getDataPlaneDir(), EDGES_FILENAME); _logger.info("Serializing flow sink set..."); serializeObject(flowSinks, flowSinksPath.toFile()); _logger.info("OK\n"); _logger.info("Serializing fibs..."); serializeObject(fibs, fibsPath.toFile()); _logger.info("OK\n"); _logger.info("Serializing policy route next hop interface map..."); serializeObject(policyRouteFibNodeMap, fibsPolicyRoutePath.toFile()); _logger.info("OK\n"); _logger.info("Serializing topology edges..."); serializeObject(topologyEdges, edgesPath.toFile()); _logger.info("OK\n"); printElapsedTime(); } private void concretize() { _logger.info("\n*** GENERATING Z3 CONCRETIZER QUERIES ***\n"); resetTimer(); String[] concInPaths = _settings.getConcretizerInputFilePaths(); String[] negConcInPaths = _settings.getNegatedConcretizerInputFilePaths(); List<ConcretizerQuery> concretizerQueries = new ArrayList<ConcretizerQuery>(); String blacklistDstIpPath = _settings.getBlacklistDstIpPath(); if (blacklistDstIpPath != null) { String blacklistDstIpFileText = readFile(new File(blacklistDstIpPath)); String[] blacklistDstIpStrs = blacklistDstIpFileText.split("\n"); Set<Ip> blacklistDstIps = new TreeSet<Ip>(); for (String blacklistDstIpStr : blacklistDstIpStrs) { Ip blacklistDstIp = new Ip(blacklistDstIpStr); blacklistDstIps.add(blacklistDstIp); } if 
(blacklistDstIps.size() == 0) { _logger.warn("Warning: empty set of blacklisted destination ips\n"); } ConcretizerQuery blacklistIpQuery = ConcretizerQuery .blacklistDstIpQuery(blacklistDstIps); concretizerQueries.add(blacklistIpQuery); } for (String concInPath : concInPaths) { _logger.info("Reading z3 datalog query output file: \"" + concInPath + "\"..."); File queryOutputFile = new File(concInPath); String queryOutputStr = readFile(queryOutputFile); _logger.info("OK\n"); DatalogQueryResultCombinedParser parser = new DatalogQueryResultCombinedParser( queryOutputStr, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext tree = parse(parser, concInPath); _logger.info("Computing concretizer queries..."); ParseTreeWalker walker = new ParseTreeWalker(); DatalogQueryResultExtractor extractor = new DatalogQueryResultExtractor( _settings.concretizeUnique(), false); walker.walk(extractor, tree); _logger.info("OK\n"); List<ConcretizerQuery> currentQueries = extractor .getConcretizerQueries(); if (concretizerQueries.size() == 0) { concretizerQueries.addAll(currentQueries); } else { concretizerQueries = ConcretizerQuery.crossProduct( concretizerQueries, currentQueries); } } if (negConcInPaths != null) { for (String negConcInPath : negConcInPaths) { _logger .info("Reading z3 datalog query output file (to be negated): \"" + negConcInPath + "\"..."); File queryOutputFile = new File(negConcInPath); String queryOutputStr = readFile(queryOutputFile); _logger.info("OK\n"); DatalogQueryResultCombinedParser parser = new DatalogQueryResultCombinedParser( queryOutputStr, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext tree = parse(parser, negConcInPath); _logger.info("Computing concretizer queries..."); ParseTreeWalker walker = new ParseTreeWalker(); DatalogQueryResultExtractor extractor = new DatalogQueryResultExtractor( _settings.concretizeUnique(), true); walker.walk(extractor, tree); _logger.info("OK\n"); List<ConcretizerQuery> currentQueries = extractor .getConcretizerQueries(); if (concretizerQueries.size() == 0) { concretizerQueries.addAll(currentQueries); } else { concretizerQueries = ConcretizerQuery.crossProduct( concretizerQueries, currentQueries); } } } for (int i = 0; i < concretizerQueries.size(); i++) { ConcretizerQuery cq = concretizerQueries.get(i); String concQueryPath = _settings.getConcretizerOutputFilePath() + "-" + i + ".smt2"; _logger.info("Writing concretizer query file: \"" + concQueryPath + "\"..."); writeFile(concQueryPath, cq.getText()); _logger.info("OK\n"); } printElapsedTime(); } private LogicBloxFrontend connect() { boolean assumedToExist = !_settings.createWorkspace(); String workspaceMaster = _settings.getWorkspaceName(); if (assumedToExist) { String jobLogicBloxHostnamePath = _settings .getJobLogicBloxHostnamePath(); if (jobLogicBloxHostnamePath != null) { String lbHostname = readFile(new File(jobLogicBloxHostnamePath)); _settings.setConnectBloxHost(lbHostname); } } LogicBloxFrontend lbFrontend = null; try { lbFrontend = initFrontend(assumedToExist, workspaceMaster); } catch (LBInitializationException e) { throw new BatfishException("Failed to connect to LogicBlox", e); } return lbFrontend; } private Map<String, Configuration> convertConfigurations( Map<String, VendorConfiguration> vendorConfigurations) { boolean processingError = false; Map<String, Configuration> configurations = new TreeMap<String, Configuration>(); _logger .info("\n*** CONVERTING VENDOR CONFIGURATIONS TO INDEPENDENT FORMAT ***\n"); 
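      // Each vendor configuration is converted independently below; warnings
      // gathered during conversion (red-flag, unimplemented, pedantic) are
      // replayed through the logger even when conversion of a node fails.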
resetTimer(); boolean pedanticAsError = _settings.getPedanticAsError(); boolean pedanticRecord = _settings.getPedanticRecord(); boolean redFlagAsError = _settings.getRedFlagAsError(); boolean redFlagRecord = _settings.getRedFlagRecord(); boolean unimplementedAsError = _settings.getUnimplementedAsError(); boolean unimplementedRecord = _settings.getUnimplementedRecord(); for (String name : vendorConfigurations.keySet()) { _logger.debug("Processing: \"" + name + "\""); VendorConfiguration vc = vendorConfigurations.get(name); Warnings warnings = new Warnings(pedanticAsError, pedanticRecord, redFlagAsError, redFlagRecord, unimplementedAsError, unimplementedRecord, false); try { Configuration config = vc .toVendorIndependentConfiguration(warnings); configurations.put(name, config); _logger.debug(" ...OK\n"); } catch (BatfishException e) { _logger.fatal("...CONVERSION ERROR\n"); _logger.fatal(ExceptionUtils.getStackTrace(e)); processingError = true; if (_settings.exitOnParseError()) { break; } else { continue; } } finally { for (String warning : warnings.getRedFlagWarnings()) { _logger.redflag(warning); } for (String warning : warnings.getUnimplementedWarnings()) { _logger.unimplemented(warning); } for (String warning : warnings.getPedanticWarnings()) { _logger.pedantic(warning); } } } if (processingError) { throw new BatfishException("Vendor conversion error(s)"); } else { printElapsedTime(); return configurations; } } public Map<String, Configuration> deserializeConfigurations( String serializedConfigPath) { _logger .info("\n*** DESERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n"); resetTimer(); Map<String, Configuration> configurations = new TreeMap<String, Configuration>(); File dir = new File(serializedConfigPath); File[] serializedConfigs = dir.listFiles(); if (serializedConfigs == null) { throw new BatfishException( "Error reading vendor-independent configs directory"); } for (File serializedConfig : serializedConfigs) { String name = serializedConfig.getName(); _logger.debug("Reading config: \"" + serializedConfig + "\""); Object object = deserializeObject(serializedConfig); Configuration c = (Configuration) object; configurations.put(name, c); _logger.debug(" ...OK\n"); } disableBlacklistedInterface(configurations); disableBlacklistedNode(configurations); printElapsedTime(); return configurations; } private Object deserializeObject(File inputFile) { FileInputStream fis; Object o = null; ObjectInputStream ois; try { fis = new FileInputStream(inputFile); if (!isJavaSerializationData(inputFile)) { XStream xstream = new XStream(new DomDriver("UTF-8")); ois = xstream.createObjectInputStream(fis); } else { ois = new ObjectInputStream(fis); } o = ois.readObject(); ois.close(); } catch (IOException | ClassNotFoundException e) { throw new BatfishException("Failed to deserialize object from file: " + inputFile.toString(), e); } return o; } public Map<String, VendorConfiguration> deserializeVendorConfigurations( String serializedVendorConfigPath) { _logger.info("\n*** DESERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); resetTimer(); Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>(); File dir = new File(serializedVendorConfigPath); File[] serializedConfigs = dir.listFiles(); if (serializedConfigs == null) { throw new BatfishException("Error reading vendor configs directory"); } for (File serializedConfig : serializedConfigs) { String name = serializedConfig.getName(); _logger.debug("Reading vendor config: \"" + serializedConfig + 
"\""); Object object = deserializeObject(serializedConfig); VendorConfiguration vc = (VendorConfiguration) object; vendorConfigurations.put(name, vc); _logger.debug("...OK\n"); } printElapsedTime(); return vendorConfigurations; } private void disableBlacklistedInterface( Map<String, Configuration> configurations) { String blacklistInterfaceString = _settings.getBlacklistInterfaceString(); if (blacklistInterfaceString != null) { String[] blacklistInterfaceStringParts = blacklistInterfaceString .split(","); String blacklistInterfaceNode = blacklistInterfaceStringParts[0]; String blacklistInterfaceName = blacklistInterfaceStringParts[1]; Configuration c = configurations.get(blacklistInterfaceNode); Interface i = c.getInterfaces().get(blacklistInterfaceName); i.setActive(false); } } private void disableBlacklistedNode(Map<String, Configuration> configurations) { String blacklistNode = _settings.getBlacklistNode(); if (blacklistNode != null) { if (!configurations.containsKey(blacklistNode)) { throw new BatfishException("Cannot blacklist non-existent node: " + blacklistNode); } Configuration configuration = configurations.get(blacklistNode); for (Interface iface : configuration.getInterfaces().values()) { iface.setActive(false); } } } private void dumpFacts(Map<String, StringBuilder> factBins) { _logger.info("\n*** DUMPING FACTS ***\n"); resetTimer(); Path factsDir = Paths.get(_settings.getDumpFactsDir()); try { Files.createDirectories(factsDir); for (String factsFilename : factBins.keySet()) { String facts = factBins.get(factsFilename).toString(); Path factsFilePath = factsDir.resolve(factsFilename); _logger.info("Writing: \"" + factsFilePath.toAbsolutePath().toString() + "\"\n"); FileUtils.write(factsFilePath.toFile(), facts); } } catch (IOException e) { throw new BatfishException("Failed to write fact dump file", e); } printElapsedTime(); } private void dumpInterfaceDescriptions(String testRigPath, String outputPath) { Map<File, String> configurationData = readConfigurationFiles(testRigPath); Map<String, VendorConfiguration> configs = parseVendorConfigurations(configurationData); Map<String, VendorConfiguration> sortedConfigs = new TreeMap<String, VendorConfiguration>(); sortedConfigs.putAll(configs); StringBuilder sb = new StringBuilder(); for (VendorConfiguration vconfig : sortedConfigs.values()) { String node = vconfig.getHostname(); CiscoVendorConfiguration config = null; try { config = (CiscoVendorConfiguration) vconfig; } catch (ClassCastException e) { continue; } Map<String, org.batfish.representation.cisco.Interface> sortedInterfaces = new TreeMap<String, org.batfish.representation.cisco.Interface>(); sortedInterfaces.putAll(config.getInterfaces()); for (org.batfish.representation.cisco.Interface iface : sortedInterfaces .values()) { String iname = iface.getName(); String description = iface.getDescription(); sb.append(node + " " + iname); if (description != null) { sb.append(" \"" + description + "\""); } sb.append("\n"); } } String output = sb.toString(); writeFile(outputPath, output); } private String flatten(String input) { JuniperCombinedParser jparser = new JuniperCombinedParser(input, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext jtree = parse(jparser); JuniperFlattener flattener = new JuniperFlattener(); ParseTreeWalker walker = new ParseTreeWalker(); walker.walk(flattener, jtree); return flattener.getFlattenedConfigurationText(); } private void flatten(String inputPath, String outputPath) { File inputFolder = new File(inputPath); 
      // Hierarchical Juniper configs are detected below by a leading '#'
      // without a "set version" line and are rewritten into flat set-style
      // syntax; all other files are copied through unchanged.
      File[] configs = inputFolder.listFiles();
      if (configs == null) {
         throw new BatfishException("Error reading configs from input test rig");
      }
      try {
         Files.createDirectories(Paths.get(outputPath));
      }
      catch (IOException e) {
         throw new BatfishException(
               "Could not create output testrig directory", e);
      }
      for (File config : configs) {
         String name = config.getName();
         _logger.debug("Reading config: \"" + config + "\"");
         String configText = readFile(config);
         _logger.debug("...OK\n");
         File outputFile = Paths.get(outputPath, name).toFile();
         String outputFileAsString = outputFile.toString();
         if (configText.charAt(0) == '#'
               && !configText.matches("(?m)set version.*")) {
            _logger.debug("Flattening config to \"" + outputFileAsString
                  + "\"...");
            String flatConfigText = flatten(configText);
            writeFile(outputFileAsString, flatConfigText);
            _logger.debug("OK\n");
         }
         else {
            _logger.debug("Copying unmodified config to \""
                  + outputFileAsString + "\"...");
            writeFile(outputFileAsString, configText);
            _logger.debug("OK\n");
         }
      }
   }

   private void genBlackHoleQueries() {
      _logger.info("\n*** GENERATING BLACK-HOLE QUERIES ***\n");
      resetTimer();
      String fiQueryBasePath = _settings.getBlackHoleQueryPath();
      String nodeSetPath = _settings.getNodeSetPath();
      _logger.info("Reading node set from : \"" + nodeSetPath + "\"...");
      NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath));
      _logger.info("OK\n");
      for (String hostname : nodes) {
         QuerySynthesizer synth = new FailureInconsistencyBlackHoleQuerySynthesizer(
               hostname);
         String queryText = synth.getQueryText();
         String fiQueryPath;
         fiQueryPath = fiQueryBasePath + "-" + hostname + ".smt2";
         _logger.info("Writing query to: \"" + fiQueryPath + "\"...");
         writeFile(fiQueryPath, queryText);
         _logger.info("OK\n");
      }
      printElapsedTime();
   }

   private void generateOspfConfigs(String topologyPath, String outputPath) {
      File topologyFilePath = new File(topologyPath);
      Topology topology = parseTopology(topologyFilePath);
      Map<String, Configuration> configs = new TreeMap<String, Configuration>();
      NodeSet allNodes = new NodeSet();
      Map<NodeInterfacePair, Set<NodeInterfacePair>> interfaceMap = new HashMap<NodeInterfacePair, Set<NodeInterfacePair>>();
      // first we collect set of all mentioned nodes, and build mapping from
      // each interface to the set of interfaces that connect to each other
      for (Edge edge : topology.getEdges()) {
         allNodes.add(edge.getNode1());
         allNodes.add(edge.getNode2());
         NodeInterfacePair interface1 = new NodeInterfacePair(edge.getNode1(),
               edge.getInt1());
         NodeInterfacePair interface2 = new NodeInterfacePair(edge.getNode2(),
               edge.getInt2());
         Set<NodeInterfacePair> interfaceSet = interfaceMap.get(interface1);
         if (interfaceSet == null) {
            interfaceSet = new HashSet<NodeInterfacePair>();
         }
         interfaceMap.put(interface1, interfaceSet);
         interfaceMap.put(interface2, interfaceSet);
         interfaceSet.add(interface1);
         interfaceSet.add(interface2);
      }
      // then we create configs for every mentioned node
      for (String hostname : allNodes) {
         Configuration config = new Configuration(hostname);
         configs.put(hostname, config);
      }
      // Now we create interfaces for each edge and record the number of
      // neighbors so we know how large to make the subnet
      long currentStartingIpAsLong = new Ip(GEN_OSPF_STARTING_IP).asLong();
      Set<Set<NodeInterfacePair>> interfaceSets = new HashSet<Set<NodeInterfacePair>>();
      interfaceSets.addAll(interfaceMap.values());
      for (Set<NodeInterfacePair> interfaceSet : interfaceSets) {
         int numInterfaces = interfaceSet.size();
         if (numInterfaces < 2) {
            throw new BatfishException(
                  "The following interface set contains fewer than two interfaces: "
                        + interfaceSet.toString());
         }
         int numHostBits = 0;
         for (int shiftedValue = numInterfaces - 1; shiftedValue != 0; shiftedValue >>= 1, numHostBits++) {
         }
         int subnetBits = 32 - numHostBits;
         int offset = 0;
         for (NodeInterfacePair currentPair : interfaceSet) {
            Ip ip = new Ip(currentStartingIpAsLong + offset);
            Prefix prefix = new Prefix(ip, subnetBits);
            String ifaceName = currentPair.getInterface();
            Interface iface = new Interface(ifaceName);
            iface.setPrefix(prefix);
            // dirty hack for setting bandwidth for now
            double ciscoBandwidth = org.batfish.representation.cisco.Interface
                  .getDefaultBandwidth(ifaceName);
            double juniperBandwidth = org.batfish.representation.juniper.Interface
                  .getDefaultBandwidthByName(ifaceName);
            double bandwidth = Math.min(ciscoBandwidth, juniperBandwidth);
            iface.setBandwidth(bandwidth);
            String hostname = currentPair.getHostname();
            Configuration config = configs.get(hostname);
            config.getInterfaces().put(ifaceName, iface);
            offset++;
         }
         currentStartingIpAsLong += (1 << numHostBits);
      }
      for (Configuration config : configs.values()) {
         // use cisco arbitrarily
         config.setVendor(CiscoVendorConfiguration.VENDOR_NAME);
         OspfProcess proc = new OspfProcess();
         config.setOspfProcess(proc);
         proc.setReferenceBandwidth(org.batfish.representation.cisco.OspfProcess.DEFAULT_REFERENCE_BANDWIDTH);
         long backboneArea = 0;
         OspfArea area = new OspfArea(backboneArea);
         proc.getAreas().put(backboneArea, area);
         area.getInterfaces().addAll(config.getInterfaces().values());
      }
      serializeIndependentConfigs(configs, outputPath);
   }

   private void generateStubs(String inputRole, int stubAs,
         String interfaceDescriptionRegex, String configPath) {
      Map<String, Configuration> configs = deserializeConfigurations(configPath);
      Pattern pattern = Pattern.compile(interfaceDescriptionRegex);
      Map<String, Configuration> stubConfigurations = new TreeMap<String, Configuration>();
      _logger.info("\n*** GENERATING STUBS ***\n");
      resetTimer();
      // load old node-roles to be updated at end
      RoleSet stubRoles = new RoleSet();
      stubRoles.add(STUB_ROLE);
      File nodeRolesPath = new File(_settings.getNodeRolesPath());
      _logger.info("Deserializing old node-roles mappings: \"" + nodeRolesPath
            + "\" ...");
      NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(nodeRolesPath);
      _logger.info("OK\n");
      // create origination policy common to all stubs
      String stubOriginationPolicyName = "~STUB_ORIGINATION_POLICY~";
      PolicyMap stubOriginationPolicy = new PolicyMap(stubOriginationPolicyName);
      PolicyMapClause clause = new PolicyMapClause();
      stubOriginationPolicy.getClauses().add(clause);
      String stubOriginationRouteFilterListName = "~STUB_ORIGINATION_ROUTE_FILTER~";
      RouteFilterList rf = new RouteFilterList(
            stubOriginationRouteFilterListName);
      RouteFilterLine rfl = new RouteFilterLine(LineAction.ACCEPT, Prefix.ZERO,
            new SubRange(0, 0));
      rf.addLine(rfl);
      PolicyMapMatchRouteFilterListLine matchLine = new PolicyMapMatchRouteFilterListLine(
            Collections.singleton(rf));
      clause.getMatchLines().add(matchLine);
      clause.setAction(PolicyMapAction.PERMIT);
      // create flow sink interface common to all stubs
      String flowSinkName = "TenGigabitEthernet100/100";
      Interface flowSink = new Interface(flowSinkName);
      flowSink.setPrefix(Prefix.ZERO);
      flowSink.setActive(true);
      flowSink.setBandwidth(10E9d);
      Set<String> skipWarningNodes = new HashSet<String>();
      for (Configuration config : configs.values()) {
         if (!config.getRoles().contains(inputRole)) {
            continue;
         }
         for (BgpNeighbor neighbor : config.getBgpProcess().getNeighbors()
               .values()) {
            if (!neighbor.getRemoteAs().equals(stubAs)) {
               continue;
            }
            Prefix neighborPrefix = neighbor.getPrefix();
            if (neighborPrefix.getPrefixLength() != 32) {
               throw new BatfishException(
                     "do not currently handle generating stubs based on dynamic bgp sessions");
            }
            Ip neighborAddress = neighborPrefix.getAddress();
            int edgeAs = neighbor.getLocalAs();
            /*
             * Now that we have the ip address of the stub, we want to find the
             * interface that connects to it. We will extract the hostname for
             * the stub from the description of this interface using the
             * supplied regex.
             */
            boolean found = false;
            for (Interface iface : config.getInterfaces().values()) {
               Prefix prefix = iface.getPrefix();
               if (prefix == null || !prefix.contains(neighborAddress)) {
                  continue;
               }
               // the neighbor address falls within the network assigned to this
               // interface, so now we check the description
               String description = iface.getDescription();
               Matcher matcher = pattern.matcher(description);
               if (matcher.find()) {
                  String hostname = matcher.group(1);
                  if (configs.containsKey(hostname)) {
                     Configuration duplicateConfig = configs.get(hostname);
                     if (!duplicateConfig.getRoles().contains(STUB_ROLE)
                           || duplicateConfig.getRoles().size() != 1) {
                        throw new BatfishException(
                              "A non-generated node with hostname: \""
                                    + hostname
                                    + "\" already exists in network under analysis");
                     }
                     else {
                        if (!skipWarningNodes.contains(hostname)) {
                           _logger
                                 .warn("WARNING: Overwriting previously generated node: \""
                                       + hostname + "\"\n");
                           skipWarningNodes.add(hostname);
                        }
                     }
                  }
                  found = true;
                  Configuration stub = stubConfigurations.get(hostname);
                  // create stub if it doesn't exist yet
                  if (stub == null) {
                     stub = new Configuration(hostname);
                     stubConfigurations.put(hostname, stub);
                     stub.getInterfaces().put(flowSinkName, flowSink);
                     stub.setBgpProcess(new BgpProcess());
                     stub.getPolicyMaps().put(stubOriginationPolicyName,
                           stubOriginationPolicy);
                     stub.getRouteFilterLists().put(
                           stubOriginationRouteFilterListName, rf);
                     stub.setVendor(CiscoVendorConfiguration.VENDOR_NAME);
                     stub.setRoles(stubRoles);
                     nodeRoles.put(hostname, stubRoles);
                  }
                  // create interface on which peering will occur
                  Map<String, Interface> stubInterfaces = stub.getInterfaces();
                  String stubInterfaceName = "TenGigabitEthernet0/"
                        + (stubInterfaces.size() - 1);
                  Interface stubInterface = new Interface(stubInterfaceName);
                  stubInterfaces.put(stubInterfaceName, stubInterface);
                  stubInterface.setPrefix(new Prefix(neighborAddress, prefix
                        .getPrefixLength()));
                  stubInterface.setActive(true);
                  stubInterface.setBandwidth(10E9d);
                  // create neighbor within bgp process
                  BgpNeighbor edgeNeighbor = new BgpNeighbor(prefix);
                  edgeNeighbor.getOriginationPolicies().add(
                        stubOriginationPolicy);
                  edgeNeighbor.setRemoteAs(edgeAs);
                  edgeNeighbor.setLocalAs(stubAs);
                  edgeNeighbor.setSendCommunity(true);
                  edgeNeighbor.setDefaultMetric(0);
                  stub.getBgpProcess().getNeighbors()
                        .put(edgeNeighbor.getPrefix(), edgeNeighbor);
                  break;
               }
               else {
                  throw new BatfishException(
                        "Unable to derive stub hostname from interface description: \""
                              + description + "\" using regex: \""
                              + interfaceDescriptionRegex + "\"");
               }
            }
            if (!found) {
               throw new BatfishException(
                     "Could not determine stub hostname corresponding to ip: \""
                           + neighborAddress.toString()
                           + "\" listed as neighbor on router: \""
                           + config.getHostname() + "\"");
            }
         }
      }
      // write updated node-roles mappings to disk
      _logger.info("Serializing updated node-roles mappings: \"" + nodeRolesPath
            + "\" ...");
      serializeObject(nodeRoles, nodeRolesPath);
      _logger.info("OK\n");
      printElapsedTime();
      // write stubs to disk
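      // (the stubs are serialized into the same vendor-independent config
      // directory they were read from, so subsequent stages pick them up
      // alongside the original nodes)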
serializeIndependentConfigs(stubConfigurations, configPath); } private void genMultipathQueries() { _logger.info("\n*** GENERATING MULTIPATH-INCONSISTENCY QUERIES ***\n"); resetTimer(); String mpiQueryBasePath = _settings.getMultipathInconsistencyQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String nodeSetTextPath = nodeSetPath + ".txt"; _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); for (String hostname : nodes) { QuerySynthesizer synth = new MultipathInconsistencyQuerySynthesizer( hostname); String queryText = synth.getQueryText(); String mpiQueryPath = mpiQueryBasePath + "-" + hostname + ".smt2"; _logger.info("Writing query to: \"" + mpiQueryPath + "\"..."); writeFile(mpiQueryPath, queryText); _logger.info("OK\n"); } _logger.info("Writing node lines for next stage..."); StringBuilder sb = new StringBuilder(); for (String node : nodes) { sb.append(node + "\n"); } writeFile(nodeSetTextPath, sb.toString()); _logger.info("OK\n"); printElapsedTime(); } private void genReachableQueries() { _logger.info("\n*** GENERATING REACHABLE QUERIES ***\n"); resetTimer(); String queryBasePath = _settings.getReachableQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String acceptNode = _settings.getAcceptNode(); String blacklistedNode = _settings.getBlacklistNode(); _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); for (String hostname : nodes) { if (hostname.equals(acceptNode) || hostname.equals(blacklistedNode)) { continue; } QuerySynthesizer synth = new ReachableQuerySynthesizer(hostname, acceptNode); String queryText = synth.getQueryText(); String queryPath; queryPath = queryBasePath + "-" + hostname + ".smt2"; _logger.info("Writing query to: \"" + queryPath + "\"..."); writeFile(queryPath, queryText); _logger.info("OK\n"); } printElapsedTime(); } private void genRoleReachabilityQueries() { _logger.info("\n*** GENERATING NODE-TO-ROLE QUERIES ***\n"); resetTimer(); String queryBasePath = _settings.getRoleReachabilityQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String nodeSetTextPath = nodeSetPath + ".txt"; String roleSetTextPath = _settings.getRoleSetPath(); String nodeRolesPath = _settings.getNodeRolesPath(); String iterationsPath = nodeRolesPath + ".iterations"; _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); _logger.info("Reading node roles from : \"" + nodeRolesPath + "\"..."); NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(new File( nodeRolesPath)); _logger.info("OK\n"); RoleNodeMap roleNodes = nodeRoles.toRoleNodeMap(); for (String hostname : nodes) { for (String role : roleNodes.keySet()) { QuerySynthesizer synth = new RoleReachabilityQuerySynthesizer( hostname, role); String queryText = synth.getQueryText(); String queryPath = queryBasePath + "-" + hostname + "-" + role + ".smt2"; _logger.info("Writing query to: \"" + queryPath + "\"..."); writeFile(queryPath, queryText); _logger.info("OK\n"); } } _logger.info("Writing node lines for next stage..."); StringBuilder sbNodes = new StringBuilder(); for (String node : nodes) { sbNodes.append(node + "\n"); } writeFile(nodeSetTextPath, sbNodes.toString()); _logger.info("OK\n"); StringBuilder sbRoles = new StringBuilder(); _logger.info("Writing role lines for next stage..."); 
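      // The node and role files written here are plain newline-separated
      // lists, one name per line; e.g. a roles file might read "core\nedge\n"
      // for hypothetical roles named core and edge.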
sbRoles = new StringBuilder(); for (String role : roleNodes.keySet()) { sbRoles.append(role + "\n"); } writeFile(roleSetTextPath, sbRoles.toString()); _logger.info("OK\n"); _logger .info("Writing role-node-role iteration ordering lines for concretizer stage..."); StringBuilder sbIterations = new StringBuilder(); for (Entry<String, NodeSet> roleNodeEntry : roleNodes.entrySet()) { String transmittingRole = roleNodeEntry.getKey(); NodeSet transmittingNodes = roleNodeEntry.getValue(); if (transmittingNodes.size() < 2) { continue; } String[] tNodeArray = transmittingNodes.toArray(new String[] {}); String masterNode = tNodeArray[0]; for (int i = 1; i < tNodeArray.length; i++) { String slaveNode = tNodeArray[i]; for (String receivingRole : roleNodes.keySet()) { String iterationLine = transmittingRole + ":" + masterNode + ":" + slaveNode + ":" + receivingRole + "\n"; sbIterations.append(iterationLine); } } } writeFile(iterationsPath, sbIterations.toString()); _logger.info("OK\n"); printElapsedTime(); } private void genRoleTransitQueries() { _logger.info("\n*** GENERATING ROLE-TO-NODE QUERIES ***\n"); resetTimer(); String queryBasePath = _settings.getRoleTransitQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String nodeSetTextPath = nodeSetPath + ".txt"; String roleSetTextPath = _settings.getRoleSetPath(); String nodeRolesPath = _settings.getNodeRolesPath(); String roleNodesPath = _settings.getRoleNodesPath(); String iterationsPath = nodeRolesPath + ".rtiterations"; String constraintsIterationsPath = nodeRolesPath + ".rtconstraintsiterations"; _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); _logger.info("Reading node roles from : \"" + nodeRolesPath + "\"..."); NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(new File( nodeRolesPath)); _logger.info("OK\n"); RoleNodeMap roleNodes = nodeRoles.toRoleNodeMap(); for (Entry<String, NodeSet> sourceEntry : roleNodes.entrySet()) { String sourceRole = sourceEntry.getKey(); for (Entry<String, NodeSet> transitEntry : roleNodes.entrySet()) { String transitRole = transitEntry.getKey(); if (transitRole.equals(sourceRole)) { continue; } NodeSet transitNodes = transitEntry.getValue(); for (String transitNode : transitNodes) { QuerySynthesizer synth = new RoleTransitQuerySynthesizer( sourceRole, transitNode); String queryText = synth.getQueryText(); String queryPath = queryBasePath + "-" + transitNode + "-" + sourceRole + ".smt2"; _logger.info("Writing query to: \"" + queryPath + "\"..."); writeFile(queryPath, queryText); _logger.info("OK\n"); } } } _logger.info("Writing node lines for next stage..."); StringBuilder sbNodes = new StringBuilder(); for (String node : nodes) { sbNodes.append(node + "\n"); } writeFile(nodeSetTextPath, sbNodes.toString()); _logger.info("OK\n"); StringBuilder sbRoles = new StringBuilder(); _logger.info("Writing role lines for next stage..."); sbRoles = new StringBuilder(); for (String role : roleNodes.keySet()) { sbRoles.append(role + "\n"); } writeFile(roleSetTextPath, sbRoles.toString()); _logger.info("OK\n"); // not actually sure if this is necessary StringBuilder sbRoleNodes = new StringBuilder(); _logger.info("Writing role-node mappings for concretizer stage..."); sbRoleNodes = new StringBuilder(); for (Entry<String, NodeSet> e : roleNodes.entrySet()) { String role = e.getKey(); NodeSet currentNodes = e.getValue(); sbRoleNodes.append(role + ":"); for (String node : currentNodes) { sbRoleNodes.append(node 
+ ",");
         }
         // (note: this appends the role name again after the trailing comma,
         // giving lines of the form "role:node1,node2,...,role")
         sbRoleNodes.append(role + "\n");
      }
      writeFile(roleNodesPath, sbRoleNodes.toString());
      _logger
            .info("Writing transitrole-transitnode-sourcerole iteration ordering lines for constraints stage...");
      StringBuilder sbConstraintsIterations = new StringBuilder();
      for (Entry<String, NodeSet> roleNodeEntry : roleNodes.entrySet()) {
         String transitRole = roleNodeEntry.getKey();
         NodeSet transitNodes = roleNodeEntry.getValue();
         if (transitNodes.size() < 2) {
            continue;
         }
         for (String sourceRole : roleNodes.keySet()) {
            if (sourceRole.equals(transitRole)) {
               continue;
            }
            for (String transitNode : transitNodes) {
               String iterationLine = transitRole + ":" + transitNode + ":"
                     + sourceRole + "\n";
               sbConstraintsIterations.append(iterationLine);
            }
         }
      }
      writeFile(constraintsIterationsPath, sbConstraintsIterations.toString());
      _logger.info("OK\n");
      _logger
            .info("Writing transitrole-master-slave-sourcerole iteration ordering lines for concretizer stage...");
      StringBuilder sbIterations = new StringBuilder();
      for (Entry<String, NodeSet> roleNodeEntry : roleNodes.entrySet()) {
         String transitRole = roleNodeEntry.getKey();
         NodeSet transitNodes = roleNodeEntry.getValue();
         if (transitNodes.size() < 2) {
            continue;
         }
         String[] tNodeArray = transitNodes.toArray(new String[] {});
         String masterNode = tNodeArray[0];
         for (int i = 1; i < tNodeArray.length; i++) {
            String slaveNode = tNodeArray[i];
            for (String sourceRole : roleNodes.keySet()) {
               if (sourceRole.equals(transitRole)) {
                  continue;
               }
               String iterationLine = transitRole + ":" + masterNode + ":"
                     + slaveNode + ":" + sourceRole + "\n";
               sbIterations.append(iterationLine);
            }
         }
      }
      writeFile(iterationsPath, sbIterations.toString());
      _logger.info("OK\n");
      printElapsedTime();
   }

   private void genZ3(Map<String, Configuration> configurations) {
      _logger.info("\n*** GENERATING Z3 LOGIC ***\n");
      resetTimer();
      Path flowSinkSetPath = Paths.get(_settings.getDataPlaneDir(),
            FLOW_SINKS_FILENAME);
      Path fibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_FILENAME);
      Path prFibsPath = Paths.get(_settings.getDataPlaneDir(),
            FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME);
      Path edgesPath = Paths.get(_settings.getDataPlaneDir(), EDGES_FILENAME);
      _logger.info("Deserializing flow sink interface set: \""
            + flowSinkSetPath.toString() + "\"...");
      FlowSinkSet flowSinks = (FlowSinkSet) deserializeObject(flowSinkSetPath
            .toFile());
      _logger.info("OK\n");
      _logger.info("Deserializing destination route fibs: \""
            + fibsPath.toString() + "\"...");
      FibMap fibs = (FibMap) deserializeObject(fibsPath.toFile());
      _logger.info("OK\n");
      _logger.info("Deserializing policy route fibs: \""
            + prFibsPath.toString() + "\"...");
      PolicyRouteFibNodeMap prFibs = (PolicyRouteFibNodeMap) deserializeObject(prFibsPath
            .toFile());
      _logger.info("OK\n");
      _logger.info("Deserializing topology edges: \"" + edgesPath.toString()
            + "\"...");
      EdgeSet topologyEdges = (EdgeSet) deserializeObject(edgesPath.toFile());
      _logger.info("OK\n");
      _logger.info("Synthesizing Z3 logic...");
      Synthesizer s = new Synthesizer(configurations, fibs, prFibs,
            topologyEdges, _settings.getSimplify(), flowSinks);
      String result = s.synthesize();
      List<String> warnings = s.getWarnings();
      int numWarnings = warnings.size();
      if (numWarnings == 0) {
         _logger.info("OK\n");
      }
      else {
         for (String warning : warnings) {
            _logger.warn(warning);
         }
      }
      String outputPath = _settings.getZ3File();
      _logger.info("Writing Z3 logic: \"" + outputPath + "\"...");
      File z3Out = new File(outputPath);
      z3Out.delete();
      writeFile(outputPath, result);
      _logger.info("OK\n");
      String nodeSetPath =
_settings.getNodeSetPath(); _logger.info("Serializing node set: \"" + nodeSetPath + "\"..."); NodeSet nodeSet = s.getNodeSet(); serializeObject(nodeSet, new File(nodeSetPath)); _logger.info("OK\n"); printElapsedTime(); } public Map<String, Configuration> getConfigurations( String serializedVendorConfigPath) { Map<String, VendorConfiguration> vendorConfigurations = deserializeVendorConfigurations(serializedVendorConfigPath); Map<String, Configuration> configurations = convertConfigurations(vendorConfigurations); return configurations; } private double getElapsedTime(long beforeTime) { long difference = System.currentTimeMillis() - beforeTime; double seconds = difference / 1000d; return seconds; } private FlowSinkSet getFlowSinkSet(LogicBloxFrontend lbFrontend) { FlowSinkSet flowSinks = new FlowSinkSet(); String qualifiedName = _predicateInfo.getPredicateNames().get( FLOW_SINK_PREDICATE_NAME); Relation flowSinkRelation = lbFrontend.queryPredicate(qualifiedName); List<String> nodes = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nodes, flowSinkRelation.getColumns().get(0)); List<String> interfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, interfaces, flowSinkRelation.getColumns().get(1)); for (int i = 0; i < nodes.size(); i++) { String node = nodes.get(i); String iface = interfaces.get(i); FlowSinkInterface f = new FlowSinkInterface(node, iface); flowSinks.add(f); } return flowSinks; } private List<String> getHelpPredicates(Map<String, String> predicateSemantics) { Set<String> helpPredicateSet = new LinkedHashSet<String>(); _settings.getHelpPredicates(); if (_settings.getHelpPredicates() == null) { helpPredicateSet.addAll(predicateSemantics.keySet()); } else { helpPredicateSet.addAll(_settings.getHelpPredicates()); } List<String> helpPredicates = new ArrayList<String>(); helpPredicates.addAll(helpPredicateSet); Collections.sort(helpPredicates); return helpPredicates; } private PolicyRouteFibNodeMap getPolicyRouteFibNodeMap( Relation fibPolicyRouteNextHops, LogicBloxFrontend lbFrontend) { PolicyRouteFibNodeMap nodeMap = new PolicyRouteFibNodeMap(); List<String> nodeList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nodeList, fibPolicyRouteNextHops.getColumns().get(0)); List<String> ipList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_IP, ipList, fibPolicyRouteNextHops.getColumns().get(1)); List<String> outInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, outInterfaces, fibPolicyRouteNextHops.getColumns().get(2)); List<String> inNodes = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, inNodes, fibPolicyRouteNextHops.getColumns().get(3)); List<String> inInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, inInterfaces, fibPolicyRouteNextHops.getColumns().get(4)); int size = nodeList.size(); for (int i = 0; i < size; i++) { String nodeOut = nodeList.get(i); String nodeIn = inNodes.get(i); Ip ip = new Ip(ipList.get(i)); String ifaceOut = outInterfaces.get(i); String ifaceIn = inInterfaces.get(i); PolicyRouteFibIpMap ipMap = nodeMap.get(nodeOut); if (ipMap == null) { ipMap = new PolicyRouteFibIpMap(); nodeMap.put(nodeOut, ipMap); } EdgeSet edges = ipMap.get(ip); if (edges == null) { edges = new EdgeSet(); ipMap.put(ip, edges); } Edge newEdge = new Edge(nodeOut, ifaceOut, nodeIn, ifaceIn); edges.add(newEdge); } return nodeMap; } public PredicateInfo 
getPredicateInfo(Map<String, String> logicFiles) { // Get predicate semantics from rules file _logger.info("\n*** PARSING PREDICATE INFO ***\n"); resetTimer(); String predicateInfoPath = getPredicateInfoPath(); PredicateInfo predicateInfo = (PredicateInfo) deserializeObject(new File( predicateInfoPath)); printElapsedTime(); return predicateInfo; } private String getPredicateInfoPath() { File logicDir = retrieveLogicDir(); return Paths.get(logicDir.toString(), PREDICATE_INFO_FILENAME).toString(); } private FibMap getRouteForwardingRules(Relation fibNetworkForward, LogicBloxFrontend lbFrontend) { FibMap fibs = new FibMap(); List<String> nameList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nameList, fibNetworkForward.getColumns().get(0)); List<String> networkList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_INDEX_NETWORK, networkList, fibNetworkForward.getColumns().get(1)); List<String> interfaceList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, interfaceList, fibNetworkForward.getColumns().get(2)); List<String> nextHopList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nextHopList, fibNetworkForward.getColumns().get(3)); List<String> nextHopIntList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nextHopIntList, fibNetworkForward.getColumns().get(4)); String currentHostname = ""; Map<String, Integer> startIndices = new HashMap<String, Integer>(); Map<String, Integer> endIndices = new HashMap<String, Integer>(); for (int i = 0; i < nameList.size(); i++) { String currentRowHostname = nameList.get(i); if (!currentHostname.equals(currentRowHostname)) { if (i > 0) { endIndices.put(currentHostname, i - 1); } currentHostname = currentRowHostname; startIndices.put(currentHostname, i); } } endIndices.put(currentHostname, nameList.size() - 1); for (String hostname : startIndices.keySet()) { FibSet fibRows = new FibSet(); fibs.put(hostname, fibRows); int startIndex = startIndices.get(hostname); int endIndex = endIndices.get(hostname); for (int i = startIndex; i <= endIndex; i++) { String networkStr = networkList.get(i); Prefix prefix = new Prefix(networkStr); String iface = interfaceList.get(i); String nextHop = nextHopList.get(i); String nextHopInt = nextHopIntList.get(i); fibRows.add(new FibRow(prefix, iface, nextHop, nextHopInt)); } } return fibs; } private Map<String, String> getSemanticsFiles() { final Map<String, String> semanticsFiles = new HashMap<String, String>(); File logicDirFile = retrieveLogicDir(); FileVisitor<Path> visitor = new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String pathString = file.toString(); if (pathString.endsWith(".semantics")) { String contents = FileUtils.readFileToString(file.toFile()); semanticsFiles.put(pathString, contents); } return super.visitFile(file, attrs); } }; try { Files.walkFileTree(Paths.get(logicDirFile.getAbsolutePath()), visitor); } catch (IOException e) { e.printStackTrace(); } cleanupLogicDir(); return semanticsFiles; } public EdgeSet getTopologyEdges(LogicBloxFrontend lbFrontend) { EdgeSet edges = new EdgeSet(); String qualifiedName = _predicateInfo.getPredicateNames().get( TOPOLOGY_PREDICATE_NAME); Relation topologyRelation = lbFrontend.queryPredicate(qualifiedName); List<String> fromRouters = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, fromRouters, 
topologyRelation.getColumns().get(0)); List<String> fromInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, fromInterfaces, topologyRelation.getColumns().get(1)); List<String> toRouters = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, toRouters, topologyRelation.getColumns().get(2)); List<String> toInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, toInterfaces, topologyRelation.getColumns().get(3)); for (int i = 0; i < fromRouters.size(); i++) { if (Util.isLoopback(fromInterfaces.get(i)) || Util.isLoopback(toInterfaces.get(i))) { continue; } Edge newEdge = new Edge(fromRouters.get(i), fromInterfaces.get(i), toRouters.get(i), toInterfaces.get(i)); edges.add(newEdge); } return edges; } private void histogram(String testRigPath) { Map<File, String> configurationData = readConfigurationFiles(testRigPath); Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData); _logger.info("Building feature histogram..."); MultiSet<String> histogram = new TreeMultiSet<String>(); for (VendorConfiguration vc : vendorConfigurations.values()) { Set<String> unimplementedFeatures = vc.getUnimplementedFeatures(); histogram.add(unimplementedFeatures); } _logger.info("OK\n"); for (String feature : histogram.elements()) { int count = histogram.count(feature); _logger.output(feature + ": " + count + "\n"); } } private ConfigurationFormat identifyConfigurationFormat(String fileText) { char firstChar = fileText.trim().charAt(0); if (firstChar == '!') { if (fileText.contains("set prompt")) { return ConfigurationFormat.VXWORKS; } else { return ConfigurationFormat.CISCO; } } else if (fileText.contains("set hostname")) { return ConfigurationFormat.JUNIPER_SWITCH; } else if (firstChar == '#') { if (fileText.contains("set version")) { return ConfigurationFormat.FLAT_JUNIPER; } else { return ConfigurationFormat.JUNIPER; } } else { return ConfigurationFormat.UNKNOWN; } } public LogicBloxFrontend initFrontend(boolean assumedToExist, String workspace) throws LBInitializationException { _logger.info("\n*** STARTING CONNECTBLOX SESSION ***\n"); resetTimer(); LogicBloxFrontend lbFrontend = new LogicBloxFrontend( _settings.getConnectBloxHost(), _settings.getConnectBloxPort(), _settings.getLbWebPort(), _settings.getLbWebAdminPort(), workspace, assumedToExist, _logger); lbFrontend.initialize(); if (!lbFrontend.connected()) { throw new BatfishException( "Error connecting to ConnectBlox service. 
Please make sure service is running and try again."); } _logger.info("SUCCESS\n"); printElapsedTime(); _lbFrontends.add(lbFrontend); return lbFrontend; } private boolean isJavaSerializationData(File inputFile) { try (FileInputStream i = new FileInputStream(inputFile)) { int headerLength = JAVA_SERIALIZED_OBJECT_HEADER.length; byte[] headerBytes = new byte[headerLength]; int result = i.read(headerBytes, 0, headerLength); if (result != headerLength) { throw new BatfishException("Read wrong number of bytes"); } return Arrays.equals(headerBytes, JAVA_SERIALIZED_OBJECT_HEADER); } catch (IOException e) { throw new BatfishException("Could not read header from file: " + inputFile.toString(), e); } } private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser) { ParserRuleContext tree; try { tree = parser.parse(); } catch (BatfishException e) { throw new ParserBatfishException("Parser error", e); } List<String> errors = parser.getErrors(); int numErrors = errors.size(); if (numErrors > 0) { _logger.error(numErrors + " ERROR(S)\n"); for (int i = 0; i < numErrors; i++) { String prefix = "ERROR " + (i + 1) + ": "; String msg = errors.get(i); String prefixedMsg = Util.applyPrefix(prefix, msg); _logger.error(prefixedMsg + "\n"); } throw new ParserBatfishException("Parser error(s)"); } else if (!_settings.printParseTree()) { _logger.info("OK\n"); } else { _logger.info("OK, PRINTING PARSE TREE:\n"); _logger.info(ParseTreePrettyPrinter.print(tree, parser) + "\n\n"); } return tree; } private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser, String filename) { _logger.info("Parsing: \"" + filename + "\"..."); return parse(parser); } private void parseFlowsFromConstraints(StringBuilder sb, RoleNodeMap roleNodes) { Path flowConstraintsDir = Paths.get(_settings.getFlowPath()); File[] constraintsFiles = flowConstraintsDir.toFile().listFiles( new FilenameFilter() { @Override public boolean accept(File dir, String filename) { return filename.matches(".*-concrete-.*.smt2.out"); } }); if (constraintsFiles == null) { throw new BatfishException("Error reading flow constraints directory"); } for (File constraintsFile : constraintsFiles) { String flowConstraintsText = readFile(constraintsFile); ConcretizerQueryResultCombinedParser parser = new ConcretizerQueryResultCombinedParser( flowConstraintsText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext tree = parse(parser, constraintsFile.toString()); ParseTreeWalker walker = new ParseTreeWalker(); ConcretizerQueryResultExtractor extractor = new ConcretizerQueryResultExtractor(); walker.walk(extractor, tree); String id = extractor.getId(); if (id == null) { continue; } Map<String, Long> constraints = extractor.getConstraints(); long src_ip = 0; long dst_ip = 0; long src_port = 0; long dst_port = 0; long protocol = IpProtocol.IP.number(); for (String varName : constraints.keySet()) { Long value = constraints.get(varName); switch (varName) { case Synthesizer.SRC_IP_VAR: src_ip = value; break; case Synthesizer.DST_IP_VAR: dst_ip = value; break; case Synthesizer.SRC_PORT_VAR: src_port = value; break; case Synthesizer.DST_PORT_VAR: dst_port = value; break; case Synthesizer.IP_PROTOCOL_VAR: protocol = value; break; default: throw new Error("invalid variable name"); } } // TODO: cleanup dirty hack if (roleNodes != null) { // id is role NodeSet nodes = roleNodes.get(id); for (String node : nodes) { String line = node + "|" + src_ip + "|" + dst_ip + "|" + src_port + "|" + dst_port + "|" + protocol + "\n"; sb.append(line); } } 
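         // otherwise id names a single concrete node rather than a role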
else { String node = id; String line = node + "|" + src_ip + "|" + dst_ip + "|" + src_port + "|" + dst_port + "|" + protocol + "\n"; sb.append(line); } } } private NodeRoleMap parseNodeRoles(String testRigPath) { Path rolePath = Paths.get(testRigPath, "node_roles"); String roleFileText = readFile(rolePath.toFile()); _logger.info("Parsing: \"" + rolePath.toAbsolutePath().toString() + "\""); BatfishCombinedParser<?, ?> parser = new RoleCombinedParser(roleFileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); RoleExtractor extractor = new RoleExtractor(); ParserRuleContext tree = parse(parser); ParseTreeWalker walker = new ParseTreeWalker(); walker.walk(extractor, tree); NodeRoleMap nodeRoles = extractor.getRoleMap(); return nodeRoles; } private Topology parseTopology(File topologyFilePath) { _logger.info("*** PARSING TOPOLOGY ***\n"); resetTimer(); String topologyFileText = readFile(topologyFilePath); BatfishCombinedParser<?, ?> parser = null; TopologyExtractor extractor = null; _logger.info("Parsing: \"" + topologyFilePath.getAbsolutePath().toString() + "\""); if (topologyFileText.startsWith("autostart")) { parser = new GNS3TopologyCombinedParser(topologyFileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); extractor = new GNS3TopologyExtractor(); } else if (topologyFileText.startsWith("CONFIGPARSER_TOPOLOGY")) { parser = new BatfishTopologyCombinedParser(topologyFileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); extractor = new BatfishTopologyExtractor(); } else if (topologyFileText.equals("")) { throw new BatfishException("...ERROR: empty topology\n"); } else { _logger.fatal("...ERROR\n"); throw new BatfishException("Topology format error"); } ParserRuleContext tree = parse(parser); ParseTreeWalker walker = new ParseTreeWalker(); walker.walk(extractor, tree); Topology topology = extractor.getTopology(); printElapsedTime(); return topology; } private Map<String, VendorConfiguration> parseVendorConfigurations( Map<File, String> configurationData) { _logger.info("\n*** PARSING VENDOR CONFIGURATION FILES ***\n"); resetTimer(); Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>(); boolean processingError = false; for (File currentFile : configurationData.keySet()) { String fileText = configurationData.get(currentFile); String currentPath = currentFile.getAbsolutePath(); VendorConfiguration vc = null; if (fileText.length() == 0) { continue; } BatfishCombinedParser<?, ?> combinedParser = null; ParserRuleContext tree = null; ControlPlaneExtractor extractor = null; Warnings warnings = new Warnings(_settings.getPedanticAsError(), _settings.getPedanticRecord() && _logger.isActive(BatfishLogger.LEVEL_PEDANTIC), _settings.getRedFlagAsError(), _settings.getRedFlagRecord() && _logger.isActive(BatfishLogger.LEVEL_REDFLAG), _settings.getUnimplementedAsError(), _settings.getUnimplementedRecord() && _logger.isActive(BatfishLogger.LEVEL_UNIMPLEMENTED), _settings.printParseTree()); ConfigurationFormat format = identifyConfigurationFormat(fileText); switch (format) { case ARISTA: case CISCO: CiscoCombinedParser ciscoParser = new CiscoCombinedParser(fileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); combinedParser = ciscoParser; extractor = new CiscoControlPlaneExtractor(fileText, ciscoParser, warnings); break; case JUNIPER: if (_settings.flattenOnTheFly()) { _logger .warn("Flattening: \"" + currentPath + "\" on-the-fly; line-numbers reported for this file 
will be spurious\n"); fileText = flatten(fileText); } else { throw new BatfishException( "Juniper configurations must be flattened prior to this stage"); } // MISSING BREAK IS INTENTIONAL case FLAT_JUNIPER: FlatJuniperCombinedParser flatJuniperParser = new FlatJuniperCombinedParser( fileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); combinedParser = flatJuniperParser; extractor = new FlatJuniperControlPlaneExtractor(fileText, flatJuniperParser, warnings); break; case JUNIPER_SWITCH: case VXWORKS: String unsupportedError = "Unsupported configuration format: \"" + format.toString() + "\" for file: \"" + currentPath + "\"\n"; if (!_settings.ignoreUnsupported() && _settings.exitOnParseError()) { throw new BatfishException(unsupportedError); } else if (!_settings.ignoreUnsupported()) { processingError = true; _logger.error(unsupportedError); } else { _logger.warn(unsupportedError); } continue; case UNKNOWN: default: String unknownError = "Unknown configuration format for file: \"" + currentPath + "\"\n"; if (_settings.exitOnParseError()) { throw new BatfishException(unknownError); } else { _logger.error(unknownError); processingError = true; continue; } } try { tree = parse(combinedParser, currentPath); _logger.info("\tPost-processing..."); extractor.processParseTree(tree); _logger.info("OK\n"); } catch (ParserBatfishException e) { String error = "Error parsing configuration file: \"" + currentPath + "\""; if (_settings.exitOnParseError()) { throw new BatfishException(error, e); } else { _logger.error(error + ":\n"); _logger.error(ExceptionUtils.getStackTrace(e)); processingError = true; continue; } } catch (Exception e) { String error = "Error post-processing parse tree of configuration file: \"" + currentPath + "\""; if (_settings.exitOnParseError()) { throw new BatfishException(error, e); } else { _logger.error(error + ":\n"); _logger.error(ExceptionUtils.getStackTrace(e)); processingError = true; continue; } } finally { for (String warning : warnings.getRedFlagWarnings()) { _logger.redflag(warning); } for (String warning : warnings.getUnimplementedWarnings()) { _logger.unimplemented(warning); } for (String warning : warnings.getPedanticWarnings()) { _logger.pedantic(warning); } } vc = extractor.getVendorConfiguration(); // at this point we should have a VendorConfiguration vc String hostname = vc.getHostname(); if (hostname == null) { String error = "No hostname set in file: \"" + currentFile + "\"\n"; if (_settings.exitOnParseError()) { throw new BatfishException(error); } else { _logger.error(error); processingError = true; continue; } } if (vendorConfigurations.containsKey(hostname)) { String error = "Duplicate hostname \"" + vc.getHostname() + "\" found in " + currentFile + "\n"; if (_settings.exitOnParseError()) { throw new BatfishException(error); } else { _logger.error(error); processingError = true; continue; } } vendorConfigurations.put(vc.getHostname(), vc); } if (processingError) { return null; } else { printElapsedTime(); return vendorConfigurations; } } private void populateConfigurationFactBins( Collection<Configuration> configurations, Map<String, StringBuilder> factBins) { _logger .info("\n*** EXTRACTING LOGICBLOX FACTS FROM CONFIGURATIONS ***\n"); resetTimer(); Set<Long> communities = new LinkedHashSet<Long>(); for (Configuration c : configurations) { communities.addAll(c.getCommunities()); } for (Configuration c : configurations) { ConfigurationFactExtractor cfe = new ConfigurationFactExtractor(c, communities, factBins); cfe.writeFacts(); for 
(String warning : cfe.getWarnings()) { _logger.warn(warning); } } printElapsedTime(); } private void postFacts(LogicBloxFrontend lbFrontend, Map<String, StringBuilder> factBins) { _logger.info("\n*** POSTING FACTS TO BLOXWEB SERVICES ***\n"); resetTimer(); _logger.info("Starting bloxweb services..."); lbFrontend.startLbWebServices(); _logger.info("OK\n"); _logger.info("Posting facts..."); try { lbFrontend.postFacts(factBins); } catch (ServiceClientException e) { throw new BatfishException("Failed to post facts to bloxweb services", e); } _logger.info("OK\n"); _logger.info("Stopping bloxweb services..."); lbFrontend.stopLbWebServices(); _logger.info("OK\n"); _logger.info("SUCCESS\n"); printElapsedTime(); } private void printAllPredicateSemantics( Map<String, String> predicateSemantics) { // Get predicate semantics from rules file _logger.info("\n*** PRINTING PREDICATE SEMANTICS ***\n"); List<String> helpPredicates = getHelpPredicates(predicateSemantics); for (String predicate : helpPredicates) { printPredicateSemantics(predicate); _logger.info("\n"); } } private void printElapsedTime() { double seconds = getElapsedTime(_timerCount); _logger.info("Time taken for this task: " + seconds + " seconds\n"); } private void printPredicate(LogicBloxFrontend lbFrontend, String predicateName) { List<String> output; printPredicateSemantics(predicateName); String qualifiedName = _predicateInfo.getPredicateNames().get( predicateName); if (qualifiedName == null) { // predicate not found _logger.error("ERROR: No information for predicate: " + predicateName + "\n"); return; } Relation relation = lbFrontend.queryPredicate(qualifiedName); try { output = lbFrontend.getPredicate(_predicateInfo, relation, predicateName); for (String match : output) { _logger.output(match + "\n"); } } catch (QueryException q) { _logger.fatal(q.getMessage() + "\n"); } } private void printPredicateCount(LogicBloxFrontend lbFrontend, String predicateName) { int numRows = lbFrontend.queryPredicate(predicateName).getColumns() .get(0).size(); String output = "|" + predicateName + "| = " + numRows + "\n"; _logger.info(output); } public void printPredicateCounts(LogicBloxFrontend lbFrontend, Set<String> predicateNames) { // Print predicate(s) here _logger.info("\n*** SUBMITTING QUERY(IES) ***\n"); resetTimer(); for (String predicateName : predicateNames) { printPredicateCount(lbFrontend, predicateName); // _logger.info("\n"); } printElapsedTime(); } public void printPredicates(LogicBloxFrontend lbFrontend, Set<String> predicateNames) { // Print predicate(s) here _logger.info("\n*** SUBMITTING QUERY(IES) ***\n"); resetTimer(); for (String predicateName : predicateNames) { printPredicate(lbFrontend, predicateName); } printElapsedTime(); } private void printPredicateSemantics(String predicateName) { String semantics = _predicateInfo.getPredicateSemantics(predicateName); if (semantics == null) { semantics = "<missing>"; } _logger.info("\n"); _logger.info("Predicate: " + predicateName + "\n"); _logger.info("Semantics: " + semantics + "\n"); } private void processTopology(File topologyFilePath, Map<String, StringBuilder> factBins) { Topology topology = null; topology = parseTopology(topologyFilePath); TopologyFactExtractor tfe = new TopologyFactExtractor(topology); tfe.writeFacts(factBins); } private Map<File, String> readConfigurationFiles(String testRigPath) { _logger.info("\n*** READING CONFIGURATION FILES ***\n"); resetTimer(); Map<File, String> configurationData = new TreeMap<File, String>(); File configsPath = Paths.get(testRigPath, 
"configs").toFile(); File[] configFilePaths = configsPath.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return !name.startsWith("."); } }); if (configFilePaths == null) { throw new BatfishException("Error reading test rig configs directory"); } for (File file : configFilePaths) { _logger.debug("Reading: \"" + file.toString() + "\"\n"); String fileText = readFile(file.getAbsoluteFile()) + "\n"; configurationData.put(file, fileText); } printElapsedTime(); return configurationData; } public String readFile(File file) { String text = null; try { text = FileUtils.readFileToString(file); } catch (IOException e) { throw new BatfishException("Failed to read file: " + file.toString(), e); } return text; } private void resetTimer() { _timerCount = System.currentTimeMillis(); } private File retrieveLogicDir() { File logicDirFile = null; final String locatorFilename = LogicResourceLocator.class.getSimpleName() + ".class"; URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain() .getCodeSource().getLocation(); String logicSourceString = logicSourceURL.toString(); UrlZipExplorer zip = null; StringFilter lbFilter = new StringFilter() { @Override public boolean accept(String filename) { return filename.endsWith(".lbb") || filename.endsWith(".lbp") || filename.endsWith(".semantics") || filename.endsWith(locatorFilename) || filename.endsWith(PREDICATE_INFO_FILENAME); } }; if (logicSourceString.startsWith("onejar:")) { FileVisitor<Path> visitor = null; try { zip = new UrlZipExplorer(logicSourceURL); Path destinationDir = Files.createTempDirectory("lbtmpproject"); File destinationDirAsFile = destinationDir.toFile(); zip.extractFiles(lbFilter, destinationDirAsFile); visitor = new SimpleFileVisitor<Path>() { private String _projectDirectory; @Override public String toString() { return _projectDirectory; } @Override public FileVisitResult visitFile(Path aFile, BasicFileAttributes aAttrs) throws IOException { if (aFile.endsWith(locatorFilename)) { _projectDirectory = aFile.getParent().toString(); return FileVisitResult.TERMINATE; } return FileVisitResult.CONTINUE; } }; Files.walkFileTree(destinationDir, visitor); _tmpLogicDir = destinationDirAsFile; } catch (IOException e) { throw new BatfishException( "Failed to retrieve logic dir from onejar archive", e); } String fileString = visitor.toString(); return new File(fileString); } else { String logicPackageResourceName = LogicResourceLocator.class .getPackage().getName().replace('.', SEPARATOR.charAt(0)); try { logicDirFile = new File(LogicResourceLocator.class.getClassLoader() .getResource(logicPackageResourceName).toURI()); } catch (URISyntaxException e) { throw new BatfishException("Failed to resolve logic directory", e); } return logicDirFile; } } private void revert(LogicBloxFrontend lbFrontend) { _logger.info("\n*** REVERTING WORKSPACE ***\n"); String workspaceName = new File(_settings.getTestRigPath()).getName(); String branchName = _settings.getBranchName(); _logger.debug("Reverting workspace: \"" + workspaceName + "\" to branch: \"" + branchName + "\n"); String errorResult = lbFrontend.revertDatabase(branchName); if (errorResult != null) { throw new BatfishException("Failed to revert database: " + errorResult); } } public void run() { if (_settings.getBuildPredicateInfo()) { buildPredicateInfo(); return; } if (_settings.getHistogram()) { histogram(_settings.getTestRigPath()); return; } if (_settings.getGenerateOspfTopologyPath() != null) { generateOspfConfigs(_settings.getGenerateOspfTopologyPath(), 
_settings.getSerializeIndependentPath()); return; } if (_settings.getFlatten()) { String flattenSource = Paths.get(_settings.getFlattenSource(), "configs").toString(); String flattenDestination = Paths.get( _settings.getFlattenDestination(), "configs").toString(); flatten(flattenSource, flattenDestination); return; } if (_settings.getGenerateStubs()) { String configPath = _settings.getSerializeIndependentPath(); String inputRole = _settings.getGenerateStubsInputRole(); String interfaceDescriptionRegex = _settings .getGenerateStubsInterfaceDescriptionRegex(); int stubAs = _settings.getGenerateStubsRemoteAs(); generateStubs(inputRole, stubAs, interfaceDescriptionRegex, configPath); return; } if (_settings.getZ3()) { Map<String, Configuration> configurations = deserializeConfigurations(_settings .getSerializeIndependentPath()); genZ3(configurations); return; } if (_settings.getAnonymize()) { anonymizeConfigurations(); return; } if (_settings.getInterfaceFailureInconsistencyReachableQuery()) { genReachableQueries(); return; } if (_settings.getRoleReachabilityQuery()) { genRoleReachabilityQueries(); return; } if (_settings.getRoleTransitQuery()) { genRoleTransitQueries(); return; } if (_settings.getInterfaceFailureInconsistencyBlackHoleQuery()) { genBlackHoleQueries(); return; } if (_settings.getGenerateMultipathInconsistencyQuery()) { genMultipathQueries(); return; } if (_settings.getSerializeVendor()) { String testRigPath = _settings.getTestRigPath(); String outputPath = _settings.getSerializeVendorPath(); serializeVendorConfigs(testRigPath, outputPath); return; } if (_settings.dumpInterfaceDescriptions()) { String testRigPath = _settings.getTestRigPath(); String outputPath = _settings.getDumpInterfaceDescriptionsPath(); dumpInterfaceDescriptions(testRigPath, outputPath); return; } if (_settings.getSerializeIndependent()) { String inputPath = _settings.getSerializeVendorPath(); String outputPath = _settings.getSerializeIndependentPath(); serializeIndependentConfigs(inputPath, outputPath); return; } if (_settings.getConcretize()) { concretize(); return; } if (_settings.getQuery() || _settings.getPrintSemantics() || _settings.getDataPlane()) { Map<String, String> logicFiles = getSemanticsFiles(); _predicateInfo = getPredicateInfo(logicFiles); // Print predicate semantics and quit if requested if (_settings.getPrintSemantics()) { printAllPredicateSemantics(_predicateInfo.getPredicateSemantics()); return; } } Map<String, StringBuilder> cpFactBins = null; if (_settings.getFacts() || _settings.getDumpControlPlaneFacts()) { cpFactBins = new LinkedHashMap<String, StringBuilder>(); initControlPlaneFactBins(cpFactBins); Map<String, Configuration> configurations = deserializeConfigurations(_settings .getSerializeIndependentPath()); writeTopologyFacts(_settings.getTestRigPath(), configurations, cpFactBins); writeConfigurationFacts(configurations, cpFactBins); String flowSinkPath = _settings.getFlowSinkPath(); if (flowSinkPath != null) { FlowSinkSet flowSinks = (FlowSinkSet) deserializeObject(new File( flowSinkPath)); writeFlowSinkFacts(flowSinks, cpFactBins); } if (_settings.getDumpControlPlaneFacts()) { dumpFacts(cpFactBins); } if (!(_settings.getFacts() || _settings.createWorkspace())) { return; } } // Start frontend LogicBloxFrontend lbFrontend = null; if (_settings.createWorkspace() || _settings.getFacts() || _settings.getQuery() || _settings.getDataPlane() || _settings.revert()) { lbFrontend = connect(); } if (_settings.revert()) { revert(lbFrontend); return; } // Create new workspace (will 
overwrite existing) if requested if (_settings.createWorkspace()) { addProject(lbFrontend); String lbHostnamePath = _settings.getJobLogicBloxHostnamePath(); String lbHostname = _settings.getServiceLogicBloxHostname(); if (lbHostnamePath != null && lbHostname != null) { writeFile(lbHostnamePath, lbHostname); } if (!_settings.getFacts()) { return; } } // Post facts if requested if (_settings.getFacts()) { addStaticFacts(lbFrontend, BASIC_FACTS_BLOCKNAME); postFacts(lbFrontend, cpFactBins); return; } if (_settings.getQuery()) { lbFrontend.initEntityTable(); Map<String, String> allPredicateNames = _predicateInfo .getPredicateNames(); Set<String> predicateNames = new TreeSet<String>(); if (_settings.getQueryAll()) { predicateNames.addAll(allPredicateNames.keySet()); } else { predicateNames.addAll(_settings.getPredicates()); } if (_settings.getCountsOnly()) { printPredicateCounts(lbFrontend, predicateNames); } else { printPredicates(lbFrontend, predicateNames); } return; } if (_settings.getDataPlane()) { computeDataPlane(lbFrontend); return; } Map<String, StringBuilder> trafficFactBins = null; if (_settings.getFlows() || _settings.getDumpTrafficFacts()) { trafficFactBins = new LinkedHashMap<String, StringBuilder>(); initTrafficFactBins(trafficFactBins); writeTrafficFacts(trafficFactBins); if (_settings.getDumpTrafficFacts()) { dumpFacts(trafficFactBins); } if (_settings.getFlows()) { lbFrontend = connect(); postFacts(lbFrontend, trafficFactBins); return; } } throw new BatfishException( "No task performed! Run with -help flag to see usage"); } private void serializeIndependentConfigs( Map<String, Configuration> configurations, String outputPath) { _logger .info("\n*** SERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n"); resetTimer(); new File(outputPath).mkdirs(); for (String name : configurations.keySet()) { Configuration c = configurations.get(name); Path currentOutputPath = Paths.get(outputPath, name); _logger.info("Serializing: \"" + name + "\" ==> \"" + currentOutputPath.toString() + "\""); serializeObject(c, currentOutputPath.toFile()); _logger.debug(" ...OK\n"); } printElapsedTime(); } private void serializeIndependentConfigs(String vendorConfigPath, String outputPath) { Map<String, Configuration> configurations = getConfigurations(vendorConfigPath); serializeIndependentConfigs(configurations, outputPath); } private void serializeObject(Object object, File outputFile) { FileOutputStream fos; ObjectOutputStream oos; try { fos = new FileOutputStream(outputFile); if (_settings.getSerializeToText()) { XStream xstream = new XStream(new DomDriver("UTF-8")); oos = xstream.createObjectOutputStream(fos); } else { oos = new ObjectOutputStream(fos); } oos.writeObject(object); oos.close(); } catch (IOException e) { throw new BatfishException( "Failed to serialize object to output file: " + outputFile.toString(), e); } } private void serializeVendorConfigs(String testRigPath, String outputPath) { Map<File, String> configurationData = readConfigurationFiles(testRigPath); Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData); if (vendorConfigurations == null) { throw new BatfishException("Exiting due to parser errors\n"); } String nodeRolesPath = _settings.getNodeRolesPath(); if (nodeRolesPath != null) { NodeRoleMap nodeRoles = parseNodeRoles(testRigPath); for (Entry<String, RoleSet> nodeRolesEntry : nodeRoles.entrySet()) { String hostname = nodeRolesEntry.getKey(); VendorConfiguration config = vendorConfigurations.get(hostname); if (config == 
null) { throw new BatfishException( "role set assigned to non-existent node: \"" + hostname + "\""); } RoleSet roles = nodeRolesEntry.getValue(); config.setRoles(roles); } _logger.info("Serializing node-roles mappings: \"" + nodeRolesPath + "\"..."); serializeObject(nodeRoles, new File(nodeRolesPath)); _logger.info("OK\n"); } _logger.info("\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); resetTimer(); new File(outputPath).mkdirs(); for (String name : vendorConfigurations.keySet()) { VendorConfiguration vc = vendorConfigurations.get(name); Path currentOutputPath = Paths.get(outputPath, name); _logger.debug("Serializing: \"" + name + "\" ==> \"" + currentOutputPath.toString() + "\"..."); serializeObject(vc, currentOutputPath.toFile()); _logger.debug("OK\n"); } printElapsedTime(); } public void writeConfigurationFacts( Map<String, Configuration> configurations, Map<String, StringBuilder> factBins) { populateConfigurationFactBins(configurations.values(), factBins); } private void writeFile(String outputPath, String output) { File outputFile = new File(outputPath); try { FileUtils.write(outputFile, output); } catch (IOException e) { throw new BatfishException("Failed to write file: " + outputPath, e); } } private void writeFlowSinkFacts(FlowSinkSet flowSinks, Map<String, StringBuilder> cpFactBins) { StringBuilder sb = cpFactBins.get("SetFlowSinkInterface"); for (FlowSinkInterface f : flowSinks) { String node = f.getNode(); String iface = f.getInterface(); sb.append(node + "|" + iface + "\n"); } } public void writeTopologyFacts(String testRigPath, Map<String, Configuration> configurations, Map<String, StringBuilder> factBins) { Path topologyFilePath = Paths.get(testRigPath, TOPOLOGY_FILENAME); // Get generated facts from topology file if (Files.exists(topologyFilePath)) { processTopology(topologyFilePath.toFile(), factBins); } else { // tell logicblox to guess adjacencies based on interface // subnetworks _logger .info("*** (GUESSING TOPOLOGY IN ABSENCE OF EXPLICIT FILE) ***\n"); StringBuilder wGuessTopology = factBins.get("GuessTopology"); wGuessTopology.append("1\n"); } } private void writeTrafficFacts(Map<String, StringBuilder> factBins) { StringBuilder wSetFlowOriginate = factBins.get("SetFlowOriginate"); RoleNodeMap roleNodes = null; if (_settings.getRoleHeaders()) { String nodeRolesPath = _settings.getNodeRolesPath(); NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(new File( nodeRolesPath)); roleNodes = nodeRoles.toRoleNodeMap(); } parseFlowsFromConstraints(wSetFlowOriginate, roleNodes); if (_settings.duplicateRoleFlows()) { StringBuilder wDuplicateRoleFlows = factBins.get("DuplicateRoleFlows"); wDuplicateRoleFlows.append("1\n"); } } }
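// The fact bins populated above are plain-text tables: initFactBins seeds
// each bin with its column-header line, and producers append one
// pipe-delimited row per fact. A SetFlowSinkInterface bin would therefore
// look like the following (hostname/interface values hypothetical):
//
//    <column headers from Facts.TRAFFIC_FACT_COLUMN_HEADERS>
//    as1border1|GigabitEthernet0/0
//
// while boolean toggles such as GuessTopology and DuplicateRoleFlows carry a
// single row containing "1".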
projects/batfish/src/org/batfish/main/Batfish.java
package org.batfish.main; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTreeWalker; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.batfish.collections.EdgeSet; import org.batfish.collections.FibMap; import org.batfish.collections.FibRow; import org.batfish.collections.FibSet; import org.batfish.collections.FlowSinkInterface; import org.batfish.collections.FlowSinkSet; import org.batfish.collections.FunctionSet; import org.batfish.collections.MultiSet; import org.batfish.collections.NodeInterfacePair; import org.batfish.collections.NodeRoleMap; import org.batfish.collections.NodeSet; import org.batfish.collections.PolicyRouteFibIpMap; import org.batfish.collections.PolicyRouteFibNodeMap; import org.batfish.collections.PredicateSemantics; import org.batfish.collections.PredicateValueTypeMap; import org.batfish.collections.QualifiedNameMap; import org.batfish.collections.RoleNodeMap; import org.batfish.collections.RoleSet; import org.batfish.collections.TreeMultiSet; import org.batfish.grammar.BatfishCombinedParser; import org.batfish.grammar.ControlPlaneExtractor; import org.batfish.grammar.ParseTreePrettyPrinter; import org.batfish.grammar.cisco.CiscoCombinedParser; import org.batfish.grammar.cisco.CiscoControlPlaneExtractor; import org.batfish.grammar.flatjuniper.FlatJuniperCombinedParser; import org.batfish.grammar.flatjuniper.FlatJuniperControlPlaneExtractor; import org.batfish.grammar.juniper.JuniperCombinedParser; import org.batfish.grammar.juniper.JuniperFlattener; import org.batfish.grammar.logicblox.LogQLPredicateInfoExtractor; import org.batfish.grammar.logicblox.LogiQLCombinedParser; import org.batfish.grammar.logicblox.LogiQLPredicateInfoResolver; import org.batfish.grammar.topology.BatfishTopologyCombinedParser; import org.batfish.grammar.topology.BatfishTopologyExtractor; import org.batfish.grammar.topology.GNS3TopologyCombinedParser; import org.batfish.grammar.topology.GNS3TopologyExtractor; import org.batfish.grammar.topology.RoleCombinedParser; import org.batfish.grammar.topology.RoleExtractor; import org.batfish.grammar.topology.TopologyExtractor; import org.batfish.grammar.z3.ConcretizerQueryResultCombinedParser; import org.batfish.grammar.z3.ConcretizerQueryResultExtractor; import org.batfish.grammar.z3.DatalogQueryResultCombinedParser; import org.batfish.grammar.z3.DatalogQueryResultExtractor; import org.batfish.logic.LogicResourceLocator; import org.batfish.logicblox.ConfigurationFactExtractor; import org.batfish.logicblox.Facts; 
import org.batfish.logicblox.LBInitializationException; import org.batfish.logicblox.LBValueType; import org.batfish.logicblox.LogicBloxFrontend; import org.batfish.logicblox.PredicateInfo; import org.batfish.logicblox.ProjectFile; import org.batfish.logicblox.QueryException; import org.batfish.logicblox.TopologyFactExtractor; import org.batfish.representation.BgpNeighbor; import org.batfish.representation.BgpProcess; import org.batfish.representation.Configuration; import org.batfish.representation.Edge; import org.batfish.representation.Interface; import org.batfish.representation.Ip; import org.batfish.representation.IpProtocol; import org.batfish.representation.LineAction; import org.batfish.representation.OspfArea; import org.batfish.representation.OspfProcess; import org.batfish.representation.PolicyMap; import org.batfish.representation.PolicyMapAction; import org.batfish.representation.PolicyMapClause; import org.batfish.representation.PolicyMapMatchRouteFilterListLine; import org.batfish.representation.Prefix; import org.batfish.representation.RouteFilterLine; import org.batfish.representation.RouteFilterList; import org.batfish.representation.Topology; import org.batfish.representation.VendorConfiguration; import org.batfish.representation.cisco.CiscoVendorConfiguration; import org.batfish.util.StringFilter; import org.batfish.util.SubRange; import org.batfish.util.UrlZipExplorer; import org.batfish.util.Util; import org.batfish.z3.ConcretizerQuery; import org.batfish.z3.FailureInconsistencyBlackHoleQuerySynthesizer; import org.batfish.z3.MultipathInconsistencyQuerySynthesizer; import org.batfish.z3.QuerySynthesizer; import org.batfish.z3.ReachableQuerySynthesizer; import org.batfish.z3.RoleReachabilityQuerySynthesizer; import org.batfish.z3.RoleTransitQuerySynthesizer; import org.batfish.z3.Synthesizer; import com.logicblox.bloxweb.client.ServiceClientException; import com.logicblox.connect.Workspace.Relation; import com.thoughtworks.xstream.XStream; import com.thoughtworks.xstream.io.xml.DomDriver; /** * This class encapsulates the main control logic for Batfish. 
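 * <p>
 * At a high level, run() dispatches one pipeline stage per invocation:
 * flattening vendor configs, serializing vendor and vendor-independent
 * representations, posting control-plane facts to LogicBlox, computing
 * data-plane structures, and generating or concretizing Z3 queries. Stages
 * communicate through serialized artifacts on disk and the LogicBlox
 * workspace.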
*/ public class Batfish implements AutoCloseable { /** * Name of the LogiQL executable block containing basic facts that are true * for any network */ private static final String BASIC_FACTS_BLOCKNAME = "BaseFacts"; /** * Name of the file in which the topology of a network is serialized */ private static final String EDGES_FILENAME = "edges"; /** * Name of the LogiQL data-plane predicate containing next hop information * for policy-routing */ private static final String FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME = "FibForwardPolicyRouteNextHopIp"; /** * Name of the LogiQL data-plane predicate containing next hop information * for destination-based routing */ private static final String FIB_PREDICATE_NAME = "FibNetwork"; /** * Name of the file in which the destination-routing FIBs are serialized */ private static final String FIBS_FILENAME = "fibs"; /** * Name of the file in which the policy-routing FIBs are serialized */ private static final String FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME = "fibs-policy-route"; /** * Name of the LogiQL predicate containing flow-sink interface tags */ private static final String FLOW_SINK_PREDICATE_NAME = "FlowSinkInterface"; /** * Name of the file in which derived flow-sink interface tags are serialized */ private static final String FLOW_SINKS_FILENAME = "flow-sinks"; private static final String GEN_OSPF_STARTING_IP = "10.0.0.0"; /** * A byte-array containing the first 4 bytes comprising the header for a file * that is the output of java serialization */ private static final byte[] JAVA_SERIALIZED_OBJECT_HEADER = { (byte) 0xac, (byte) 0xed, (byte) 0x00, (byte) 0x05 }; /** * The name of the LogiQL library for org.batfish */ private static final String LB_BATFISH_LIBRARY_NAME = "libbatfish"; /** * The name of the file in which LogiQL predicate type-information and * documentation is serialized */ private static final String PREDICATE_INFO_FILENAME = "predicateInfo.object"; /** * A string containing the system-specific path separator character */ private static final String SEPARATOR = System.getProperty("file.separator"); /** * Role name for generated stubs */ private static final String STUB_ROLE = "generated_stubs"; /** * The name of the [optional] topology file within a test-rig */ private static final String TOPOLOGY_FILENAME = "topology.net"; /** * The name of the LogiQL predicate containing pairs of interfaces in the * same LAN segment */ private static final String TOPOLOGY_PREDICATE_NAME = "LanAdjacent"; private static void initControlPlaneFactBins( Map<String, StringBuilder> factBins) { initFactBins(Facts.CONTROL_PLANE_FACT_COLUMN_HEADERS, factBins); } private static void initFactBins(Map<String, String> columnHeaderMap, Map<String, StringBuilder> factBins) { for (String factPredicate : columnHeaderMap.keySet()) { String columnHeaders = columnHeaderMap.get(factPredicate); String initialText = columnHeaders + "\n"; factBins.put(factPredicate, new StringBuilder(initialText)); } } private static void initTrafficFactBins(Map<String, StringBuilder> factBins) { initFactBins(Facts.TRAFFIC_FACT_COLUMN_HEADERS, factBins); } private List<LogicBloxFrontend> _lbFrontends; private BatfishLogger _logger; private PredicateInfo _predicateInfo; private Settings _settings; private long _timerCount; private File _tmpLogicDir; public Batfish(Settings settings) { _settings = settings; _logger = _settings.getLogger(); _lbFrontends = new ArrayList<LogicBloxFrontend>(); _tmpLogicDir = null; } private void addProject(LogicBloxFrontend lbFrontend) { _logger.info("\n*** ADDING 
PROJECT ***\n"); resetTimer(); String settingsLogicDir = _settings.getLogicDir(); File logicDir; if (settingsLogicDir != null) { logicDir = new ProjectFile(settingsLogicDir); } else { logicDir = retrieveLogicDir().getAbsoluteFile(); } String result = lbFrontend.addProject(logicDir, ""); cleanupLogicDir(); if (result != null) { throw new BatfishException(result + "\n"); } _logger.info("SUCCESS\n"); printElapsedTime(); } private void addStaticFacts(LogicBloxFrontend lbFrontend, String blockName) { _logger.info("\n*** ADDING STATIC FACTS ***\n"); resetTimer(); _logger.info("Adding " + blockName + "...."); String output = lbFrontend.execNamedBlock(LB_BATFISH_LIBRARY_NAME + ":" + blockName); if (output == null) { _logger.info("OK\n"); } else { throw new BatfishException(output + "\n"); } _logger.info("SUCCESS\n"); printElapsedTime(); } private void anonymizeConfigurations() { // TODO Auto-generated method stub } /** * This function extracts predicate type information from the logic files. It * is meant only to be called during the build process, and should never be * executed from a jar */ private void buildPredicateInfo() { Path logicBinDirPath = null; URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain() .getCodeSource().getLocation(); String logicSourceString = logicSourceURL.toString(); if (logicSourceString.startsWith("onejar:")) { throw new BatfishException( "buildPredicateInfo() should never be called from within a jar"); } String logicPackageResourceName = LogicResourceLocator.class.getPackage() .getName().replace('.', SEPARATOR.charAt(0)); try { logicBinDirPath = Paths.get(LogicResourceLocator.class .getClassLoader().getResource(logicPackageResourceName).toURI()); } catch (URISyntaxException e) { throw new BatfishException("Failed to resolve logic output directory", e); } Path logicSrcDirPath = Paths.get(_settings.getLogicSrcDir()); final Set<Path> logicFiles = new TreeSet<Path>(); try { Files.walkFileTree(logicSrcDirPath, new java.nio.file.SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String name = file.getFileName().toString(); if (!name.equals("BaseFacts.logic") && !name.endsWith("_rules.logic") && !name.startsWith("service_") && name.endsWith(".logic")) { logicFiles.add(file); } return super.visitFile(file, attrs); } }); } catch (IOException e) { throw new BatfishException("Could not make list of logic files", e); } PredicateValueTypeMap predicateValueTypes = new PredicateValueTypeMap(); QualifiedNameMap qualifiedNameMap = new QualifiedNameMap(); FunctionSet functions = new FunctionSet(); PredicateSemantics predicateSemantics = new PredicateSemantics(); List<ParserRuleContext> trees = new ArrayList<ParserRuleContext>(); for (Path logicFilePath : logicFiles) { String input = readFile(logicFilePath.toFile()); LogiQLCombinedParser parser = new LogiQLCombinedParser(input, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext tree = parse(parser, logicFilePath.toString()); trees.add(tree); } ParseTreeWalker walker = new ParseTreeWalker(); for (ParserRuleContext tree : trees) { LogQLPredicateInfoExtractor extractor = new LogQLPredicateInfoExtractor( predicateValueTypes); walker.walk(extractor, tree); } for (ParserRuleContext tree : trees) { LogiQLPredicateInfoResolver resolver = new LogiQLPredicateInfoResolver( predicateValueTypes, qualifiedNameMap, functions, predicateSemantics); walker.walk(resolver, tree); } PredicateInfo predicateInfo = new 
PredicateInfo(predicateSemantics,
            predicateValueTypes, functions, qualifiedNameMap);
      File predicateInfoFile = logicBinDirPath.resolve(PREDICATE_INFO_FILENAME).toFile();
      serializeObject(predicateInfo, predicateInfoFile);
   }

   private void cleanupLogicDir() {
      if (_tmpLogicDir != null) {
         try {
            FileUtils.deleteDirectory(_tmpLogicDir);
         }
         catch (IOException e) {
            throw new BatfishException("Error cleaning up temporary logic directory", e);
         }
         _tmpLogicDir = null;
      }
   }

   @Override
   public void close() throws Exception {
      for (LogicBloxFrontend lbFrontend : _lbFrontends) {
         // Close backend threads
         if (lbFrontend != null && lbFrontend.connected()) {
            lbFrontend.close();
         }
      }
   }

   private void computeDataPlane(LogicBloxFrontend lbFrontend) {
      _logger.info("\n*** COMPUTING DATA PLANE STRUCTURES ***\n");
      resetTimer();
      lbFrontend.initEntityTable();
      _logger.info("Retrieving flow sink information from LogicBlox...");
      FlowSinkSet flowSinks = getFlowSinkSet(lbFrontend);
      _logger.info("OK\n");
      _logger.info("Retrieving topology information from LogicBlox...");
      EdgeSet topologyEdges = getTopologyEdges(lbFrontend);
      _logger.info("OK\n");
      String fibQualifiedName = _predicateInfo.getPredicateNames().get(FIB_PREDICATE_NAME);
      _logger.info("Retrieving destination-routing FIB information from LogicBlox...");
      Relation fibNetwork = lbFrontend.queryPredicate(fibQualifiedName);
      _logger.info("OK\n");
      String fibPolicyRouteNextHopQualifiedName = _predicateInfo.getPredicateNames()
            .get(FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME);
      _logger.info("Retrieving policy-routing FIB information from LogicBlox...");
      Relation fibPolicyRouteNextHops = lbFrontend.queryPredicate(fibPolicyRouteNextHopQualifiedName);
      _logger.info("OK\n");
      _logger.info("Calculating forwarding rules...");
      FibMap fibs = getRouteForwardingRules(fibNetwork, lbFrontend);
      PolicyRouteFibNodeMap policyRouteFibNodeMap = getPolicyRouteFibNodeMap(fibPolicyRouteNextHops, lbFrontend);
      _logger.info("OK\n");
      Path flowSinksPath = Paths.get(_settings.getDataPlaneDir(), FLOW_SINKS_FILENAME);
      Path fibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_FILENAME);
      Path fibsPolicyRoutePath = Paths.get(_settings.getDataPlaneDir(), FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME);
      Path edgesPath = Paths.get(_settings.getDataPlaneDir(), EDGES_FILENAME);
      _logger.info("Serializing flow sink set...");
      serializeObject(flowSinks, flowSinksPath.toFile());
      _logger.info("OK\n");
      _logger.info("Serializing fibs...");
      serializeObject(fibs, fibsPath.toFile());
      _logger.info("OK\n");
      _logger.info("Serializing policy route next hop interface map...");
      serializeObject(policyRouteFibNodeMap, fibsPolicyRoutePath.toFile());
      _logger.info("OK\n");
      _logger.info("Serializing topology edges...");
      serializeObject(topologyEdges, edgesPath.toFile());
      _logger.info("OK\n");
      printElapsedTime();
   }
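// concretize() below combines the query lists extracted from each input
// file via ConcretizerQuery.crossProduct. Assuming crossProduct conjoins
// every query from the first list with every query from the second (the
// exact semantics live in ConcretizerQuery and are an assumption here),
// two inputs yielding [a, b] and [c, d] would combine into four queries:
//
//    List<ConcretizerQuery> combined =
//          ConcretizerQuery.crossProduct(Arrays.asList(a, b), Arrays.asList(c, d));
//    // combined: [a^c, a^d, b^c, b^d]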
   private void concretize() {
      _logger.info("\n*** GENERATING Z3 CONCRETIZER QUERIES ***\n");
      resetTimer();
      String[] concInPaths = _settings.getConcretizerInputFilePaths();
      String[] negConcInPaths = _settings.getNegatedConcretizerInputFilePaths();
      List<ConcretizerQuery> concretizerQueries = new ArrayList<ConcretizerQuery>();
      String blacklistDstIpPath = _settings.getBlacklistDstIpPath();
      if (blacklistDstIpPath != null) {
         String blacklistDstIpFileText = readFile(new File(blacklistDstIpPath));
         String[] blacklistDstIpStrs = blacklistDstIpFileText.split("\n");
         Set<Ip> blacklistDstIps = new TreeSet<Ip>();
         for (String blacklistDstIpStr : blacklistDstIpStrs) {
            Ip blacklistDstIp = new Ip(blacklistDstIpStr);
            blacklistDstIps.add(blacklistDstIp);
         }
         if (blacklistDstIps.size() == 0) {
            _logger.warn("Warning: empty set of blacklisted destination ips\n");
         }
         ConcretizerQuery blacklistIpQuery = ConcretizerQuery.blacklistDstIpQuery(blacklistDstIps);
         concretizerQueries.add(blacklistIpQuery);
      }
      for (String concInPath : concInPaths) {
         _logger.info("Reading z3 datalog query output file: \"" + concInPath + "\"...");
         File queryOutputFile = new File(concInPath);
         String queryOutputStr = readFile(queryOutputFile);
         _logger.info("OK\n");
         DatalogQueryResultCombinedParser parser = new DatalogQueryResultCombinedParser(
               queryOutputStr, _settings.getThrowOnParserError(),
               _settings.getThrowOnLexerError());
         ParserRuleContext tree = parse(parser, concInPath);
         _logger.info("Computing concretizer queries...");
         ParseTreeWalker walker = new ParseTreeWalker();
         DatalogQueryResultExtractor extractor = new DatalogQueryResultExtractor(
               _settings.concretizeUnique(), false);
         walker.walk(extractor, tree);
         _logger.info("OK\n");
         List<ConcretizerQuery> currentQueries = extractor.getConcretizerQueries();
         if (concretizerQueries.size() == 0) {
            concretizerQueries.addAll(currentQueries);
         }
         else {
            concretizerQueries = ConcretizerQuery.crossProduct(concretizerQueries, currentQueries);
         }
      }
      if (negConcInPaths != null) {
         for (String negConcInPath : negConcInPaths) {
            _logger.info("Reading z3 datalog query output file (to be negated): \"" + negConcInPath + "\"...");
            File queryOutputFile = new File(negConcInPath);
            String queryOutputStr = readFile(queryOutputFile);
            _logger.info("OK\n");
            DatalogQueryResultCombinedParser parser = new DatalogQueryResultCombinedParser(
                  queryOutputStr, _settings.getThrowOnParserError(),
                  _settings.getThrowOnLexerError());
            ParserRuleContext tree = parse(parser, negConcInPath);
            _logger.info("Computing concretizer queries...");
            ParseTreeWalker walker = new ParseTreeWalker();
            DatalogQueryResultExtractor extractor = new DatalogQueryResultExtractor(
                  _settings.concretizeUnique(), true);
            walker.walk(extractor, tree);
            _logger.info("OK\n");
            List<ConcretizerQuery> currentQueries = extractor.getConcretizerQueries();
            if (concretizerQueries.size() == 0) {
               concretizerQueries.addAll(currentQueries);
            }
            else {
               concretizerQueries = ConcretizerQuery.crossProduct(concretizerQueries, currentQueries);
            }
         }
      }
      for (int i = 0; i < concretizerQueries.size(); i++) {
         ConcretizerQuery cq = concretizerQueries.get(i);
         String concQueryPath = _settings.getConcretizerOutputFilePath() + "-" + i + ".smt2";
         _logger.info("Writing concretizer query file: \"" + concQueryPath + "\"...");
         writeFile(concQueryPath, cq.getText());
         _logger.info("OK\n");
      }
      printElapsedTime();
   }

   private LogicBloxFrontend connect() {
      boolean assumedToExist = !_settings.createWorkspace();
      String workspaceMaster = _settings.getWorkspaceName();
      if (assumedToExist) {
         String lbHostname = readFile(new File(_settings.getJobLogicBloxHostnamePath()));
         _settings.setConnectBloxHost(lbHostname);
      }
      LogicBloxFrontend lbFrontend = null;
      try {
         lbFrontend = initFrontend(assumedToExist, workspaceMaster);
      }
      catch (LBInitializationException e) {
         throw new BatfishException("Failed to connect to LogicBlox", e);
      }
      return lbFrontend;
   }

   private Map<String, Configuration> convertConfigurations(
         Map<String, VendorConfiguration> vendorConfigurations) {
      boolean processingError = false;
      Map<String, Configuration> configurations = new TreeMap<String, Configuration>();
      _logger.info("\n*** CONVERTING VENDOR CONFIGURATIONS TO INDEPENDENT FORMAT ***\n");
      resetTimer();
      boolean pedanticAsError = _settings.getPedanticAsError();
      boolean pedanticRecord =
_settings.getPedanticRecord(); boolean redFlagAsError = _settings.getRedFlagAsError(); boolean redFlagRecord = _settings.getRedFlagRecord(); boolean unimplementedAsError = _settings.getUnimplementedAsError(); boolean unimplementedRecord = _settings.getUnimplementedRecord(); for (String name : vendorConfigurations.keySet()) { _logger.debug("Processing: \"" + name + "\""); VendorConfiguration vc = vendorConfigurations.get(name); Warnings warnings = new Warnings(pedanticAsError, pedanticRecord, redFlagAsError, redFlagRecord, unimplementedAsError, unimplementedRecord, false); try { Configuration config = vc .toVendorIndependentConfiguration(warnings); configurations.put(name, config); _logger.debug(" ...OK\n"); } catch (BatfishException e) { _logger.fatal("...CONVERSION ERROR\n"); _logger.fatal(ExceptionUtils.getStackTrace(e)); processingError = true; if (_settings.exitOnParseError()) { break; } else { continue; } } finally { for (String warning : warnings.getRedFlagWarnings()) { _logger.redflag(warning); } for (String warning : warnings.getUnimplementedWarnings()) { _logger.unimplemented(warning); } for (String warning : warnings.getPedanticWarnings()) { _logger.pedantic(warning); } } } if (processingError) { throw new BatfishException("Vendor conversion error(s)"); } else { printElapsedTime(); return configurations; } } public Map<String, Configuration> deserializeConfigurations( String serializedConfigPath) { _logger .info("\n*** DESERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n"); resetTimer(); Map<String, Configuration> configurations = new TreeMap<String, Configuration>(); File dir = new File(serializedConfigPath); File[] serializedConfigs = dir.listFiles(); if (serializedConfigs == null) { throw new BatfishException( "Error reading vendor-independent configs directory"); } for (File serializedConfig : serializedConfigs) { String name = serializedConfig.getName(); _logger.debug("Reading config: \"" + serializedConfig + "\""); Object object = deserializeObject(serializedConfig); Configuration c = (Configuration) object; configurations.put(name, c); _logger.debug(" ...OK\n"); } disableBlacklistedInterface(configurations); disableBlacklistedNode(configurations); printElapsedTime(); return configurations; } private Object deserializeObject(File inputFile) { FileInputStream fis; Object o = null; ObjectInputStream ois; try { fis = new FileInputStream(inputFile); if (!isJavaSerializationData(inputFile)) { XStream xstream = new XStream(new DomDriver("UTF-8")); ois = xstream.createObjectInputStream(fis); } else { ois = new ObjectInputStream(fis); } o = ois.readObject(); ois.close(); } catch (IOException | ClassNotFoundException e) { throw new BatfishException("Failed to deserialize object from file: " + inputFile.toString(), e); } return o; } public Map<String, VendorConfiguration> deserializeVendorConfigurations( String serializedVendorConfigPath) { _logger.info("\n*** DESERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); resetTimer(); Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>(); File dir = new File(serializedVendorConfigPath); File[] serializedConfigs = dir.listFiles(); if (serializedConfigs == null) { throw new BatfishException("Error reading vendor configs directory"); } for (File serializedConfig : serializedConfigs) { String name = serializedConfig.getName(); _logger.debug("Reading vendor config: \"" + serializedConfig + "\""); Object object = deserializeObject(serializedConfig); VendorConfiguration vc = 
(VendorConfiguration) object;
         vendorConfigurations.put(name, vc);
         _logger.debug("...OK\n");
      }
      printElapsedTime();
      return vendorConfigurations;
   }

   private void disableBlacklistedInterface(
         Map<String, Configuration> configurations) {
      String blacklistInterfaceString = _settings.getBlacklistInterfaceString();
      if (blacklistInterfaceString != null) {
         String[] blacklistInterfaceStringParts = blacklistInterfaceString.split(",");
         String blacklistInterfaceNode = blacklistInterfaceStringParts[0];
         String blacklistInterfaceName = blacklistInterfaceStringParts[1];
         Configuration c = configurations.get(blacklistInterfaceNode);
         // guard against a bad node/interface pair instead of failing with a
         // NullPointerException, mirroring the check in disableBlacklistedNode
         if (c == null) {
            throw new BatfishException("Cannot blacklist interface on non-existent node: " + blacklistInterfaceNode);
         }
         Interface i = c.getInterfaces().get(blacklistInterfaceName);
         if (i == null) {
            throw new BatfishException("Cannot blacklist non-existent interface: " + blacklistInterfaceName + " on node: " + blacklistInterfaceNode);
         }
         i.setActive(false);
      }
   }

   private void disableBlacklistedNode(Map<String, Configuration> configurations) {
      String blacklistNode = _settings.getBlacklistNode();
      if (blacklistNode != null) {
         if (!configurations.containsKey(blacklistNode)) {
            throw new BatfishException("Cannot blacklist non-existent node: " + blacklistNode);
         }
         Configuration configuration = configurations.get(blacklistNode);
         for (Interface iface : configuration.getInterfaces().values()) {
            iface.setActive(false);
         }
      }
   }

   private void dumpFacts(Map<String, StringBuilder> factBins) {
      _logger.info("\n*** DUMPING FACTS ***\n");
      resetTimer();
      Path factsDir = Paths.get(_settings.getDumpFactsDir());
      try {
         Files.createDirectories(factsDir);
         for (String factsFilename : factBins.keySet()) {
            String facts = factBins.get(factsFilename).toString();
            Path factsFilePath = factsDir.resolve(factsFilename);
            _logger.info("Writing: \"" + factsFilePath.toAbsolutePath().toString() + "\"\n");
            FileUtils.write(factsFilePath.toFile(), facts);
         }
      }
      catch (IOException e) {
         throw new BatfishException("Failed to write fact dump file", e);
      }
      printElapsedTime();
   }

   private void dumpInterfaceDescriptions(String testRigPath, String outputPath) {
      Map<File, String> configurationData = readConfigurationFiles(testRigPath);
      Map<String, VendorConfiguration> configs = parseVendorConfigurations(configurationData);
      Map<String, VendorConfiguration> sortedConfigs = new TreeMap<String, VendorConfiguration>();
      sortedConfigs.putAll(configs);
      StringBuilder sb = new StringBuilder();
      for (VendorConfiguration vconfig : sortedConfigs.values()) {
         String node = vconfig.getHostname();
         // skip non-Cisco configurations instead of relying on a
         // ClassCastException for control flow
         if (!(vconfig instanceof CiscoVendorConfiguration)) {
            continue;
         }
         CiscoVendorConfiguration config = (CiscoVendorConfiguration) vconfig;
         Map<String, org.batfish.representation.cisco.Interface> sortedInterfaces = new TreeMap<String, org.batfish.representation.cisco.Interface>();
         sortedInterfaces.putAll(config.getInterfaces());
         for (org.batfish.representation.cisco.Interface iface : sortedInterfaces.values()) {
            String iname = iface.getName();
            String description = iface.getDescription();
            sb.append(node + " " + iname);
            if (description != null) {
               sb.append(" \"" + description + "\"");
            }
            sb.append("\n");
         }
      }
      String output = sb.toString();
      writeFile(outputPath, output);
   }

   private String flatten(String input) {
      JuniperCombinedParser jparser = new JuniperCombinedParser(input,
            _settings.getThrowOnParserError(), _settings.getThrowOnLexerError());
      ParserRuleContext jtree = parse(jparser);
      JuniperFlattener flattener = new JuniperFlattener();
      ParseTreeWalker walker = new ParseTreeWalker();
      walker.walk(flattener, jtree);
      return flattener.getFlattenedConfigurationText();
   }
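// A sketch of what the JuniperFlattener transformation is expected to do,
// assuming standard Juniper syntax (the exact output is determined by
// JuniperFlattener itself). A hierarchical input such as:
//
//    system {
//       host-name border1;
//    }
//
// would become one set-style line per leaf statement:
//
//    set system host-name border1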
BatfishException("Error reading configs from input test rig"); } try { Files.createDirectories(Paths.get(outputPath)); } catch (IOException e) { throw new BatfishException( "Could not create output testrig directory", e); } for (File config : configs) { String name = config.getName(); _logger.debug("Reading config: \"" + config + "\""); String configText = readFile(config); _logger.debug("..OK\n"); File outputFile = Paths.get(outputPath, name).toFile(); String outputFileAsString = outputFile.toString(); if (configText.charAt(0) == '#' && !configText.matches("(?m)set version.*")) { _logger.debug("Flattening config to \"" + outputFileAsString + "\"..."); String flatConfigText = flatten(configText); writeFile(outputFileAsString, flatConfigText); } else { _logger.debug("Copying unmodified config to \"" + outputFileAsString + "\"..."); writeFile(outputFileAsString, configText); _logger.debug("OK\n"); } } } private void genBlackHoleQueries() { _logger.info("\n*** GENERATING BLACK-HOLE QUERIES ***\n"); resetTimer(); String fiQueryBasePath = _settings.getBlackHoleQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); for (String hostname : nodes) { QuerySynthesizer synth = new FailureInconsistencyBlackHoleQuerySynthesizer( hostname); String queryText = synth.getQueryText(); String fiQueryPath; fiQueryPath = fiQueryBasePath + "-" + hostname + ".smt2"; _logger.info("Writing query to: \"" + fiQueryPath + "\"..."); writeFile(fiQueryPath, queryText); _logger.info("OK\n"); } printElapsedTime(); } private void generateOspfConfigs(String topologyPath, String outputPath) { File topologyFilePath = new File(topologyPath); Topology topology = parseTopology(topologyFilePath); Map<String, Configuration> configs = new TreeMap<String, Configuration>(); NodeSet allNodes = new NodeSet(); Map<NodeInterfacePair, Set<NodeInterfacePair>> interfaceMap = new HashMap<NodeInterfacePair, Set<NodeInterfacePair>>(); // first we collect set of all mentioned nodes, and build mapping from // each interface to the set of interfaces that connect to each other for (Edge edge : topology.getEdges()) { allNodes.add(edge.getNode1()); allNodes.add(edge.getNode2()); NodeInterfacePair interface1 = new NodeInterfacePair(edge.getNode1(), edge.getInt1()); NodeInterfacePair interface2 = new NodeInterfacePair(edge.getNode2(), edge.getInt2()); Set<NodeInterfacePair> interfaceSet = interfaceMap.get(interface1); if (interfaceSet == null) { interfaceSet = new HashSet<NodeInterfacePair>(); } interfaceMap.put(interface1, interfaceSet); interfaceMap.put(interface2, interfaceSet); interfaceSet.add(interface1); interfaceSet.add(interface2); } // then we create configs for every mentioned node for (String hostname : allNodes) { Configuration config = new Configuration(hostname); configs.put(hostname, config); } // Now we create interfaces for each edge and record the number of // neighbors so we know how large to make the subnet long currentStartingIpAsLong = new Ip(GEN_OSPF_STARTING_IP).asLong(); Set<Set<NodeInterfacePair>> interfaceSets = new HashSet<Set<NodeInterfacePair>>(); interfaceSets.addAll(interfaceMap.values()); for (Set<NodeInterfacePair> interfaceSet : interfaceSets) { int numInterfaces = interfaceSet.size(); if (numInterfaces < 2) { throw new BatfishException( "The following interface set contains less than two interfaces: " + interfaceSet.toString()); } int numHostBits = 0; 
   private void generateOspfConfigs(String topologyPath, String outputPath) {
      File topologyFilePath = new File(topologyPath);
      Topology topology = parseTopology(topologyFilePath);
      Map<String, Configuration> configs = new TreeMap<String, Configuration>();
      NodeSet allNodes = new NodeSet();
      Map<NodeInterfacePair, Set<NodeInterfacePair>> interfaceMap = new HashMap<NodeInterfacePair, Set<NodeInterfacePair>>();
      // first we collect set of all mentioned nodes, and build mapping from
      // each interface to the set of interfaces that connect to each other
      for (Edge edge : topology.getEdges()) {
         allNodes.add(edge.getNode1());
         allNodes.add(edge.getNode2());
         NodeInterfacePair interface1 = new NodeInterfacePair(edge.getNode1(), edge.getInt1());
         NodeInterfacePair interface2 = new NodeInterfacePair(edge.getNode2(), edge.getInt2());
         Set<NodeInterfacePair> interfaceSet = interfaceMap.get(interface1);
         if (interfaceSet == null) {
            interfaceSet = new HashSet<NodeInterfacePair>();
         }
         interfaceMap.put(interface1, interfaceSet);
         interfaceMap.put(interface2, interfaceSet);
         interfaceSet.add(interface1);
         interfaceSet.add(interface2);
      }
      // then we create configs for every mentioned node
      for (String hostname : allNodes) {
         Configuration config = new Configuration(hostname);
         configs.put(hostname, config);
      }
      // Now we create interfaces for each edge and record the number of
      // neighbors so we know how large to make the subnet
      long currentStartingIpAsLong = new Ip(GEN_OSPF_STARTING_IP).asLong();
      Set<Set<NodeInterfacePair>> interfaceSets = new HashSet<Set<NodeInterfacePair>>();
      interfaceSets.addAll(interfaceMap.values());
      for (Set<NodeInterfacePair> interfaceSet : interfaceSets) {
         int numInterfaces = interfaceSet.size();
         if (numInterfaces < 2) {
            throw new BatfishException("The following interface set contains fewer than two interfaces: " + interfaceSet.toString());
         }
         int numHostBits = 0;
         for (int shiftedValue = numInterfaces - 1; shiftedValue != 0; shiftedValue >>= 1, numHostBits++) {
         }
         int subnetBits = 32 - numHostBits;
         int offset = 0;
         for (NodeInterfacePair currentPair : interfaceSet) {
            Ip ip = new Ip(currentStartingIpAsLong + offset);
            Prefix prefix = new Prefix(ip, subnetBits);
            String ifaceName = currentPair.getInterface();
            Interface iface = new Interface(ifaceName);
            iface.setPrefix(prefix);
            // dirty hack for setting bandwidth for now
            double ciscoBandwidth = org.batfish.representation.cisco.Interface.getDefaultBandwidth(ifaceName);
            double juniperBandwidth = org.batfish.representation.juniper.Interface.getDefaultBandwidthByName(ifaceName);
            double bandwidth = Math.min(ciscoBandwidth, juniperBandwidth);
            iface.setBandwidth(bandwidth);
            String hostname = currentPair.getHostname();
            Configuration config = configs.get(hostname);
            config.getInterfaces().put(ifaceName, iface);
            offset++;
         }
         currentStartingIpAsLong += (1 << numHostBits);
      }
      for (Configuration config : configs.values()) {
         // use cisco arbitrarily
         config.setVendor(CiscoVendorConfiguration.VENDOR_NAME);
         OspfProcess proc = new OspfProcess();
         config.setOspfProcess(proc);
         proc.setReferenceBandwidth(org.batfish.representation.cisco.OspfProcess.DEFAULT_REFERENCE_BANDWIDTH);
         long backboneArea = 0;
         OspfArea area = new OspfArea(backboneArea);
         proc.getAreas().put(backboneArea, area);
         area.getInterfaces().addAll(config.getInterfaces().values());
      }
      serializeIndependentConfigs(configs, outputPath);
   }
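// generateStubs below derives each stub's hostname from capture group 1 of
// the supplied interface-description regex. A hypothetical pattern such as
//
//    "to-(\\w+)"
//
// applied to an interface description "to-stub7" would yield the stub
// hostname "stub7". A pattern that matches but defines no capture group
// would make matcher.group(1) throw, so the regex must define exactly one.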
   private void generateStubs(String inputRole, int stubAs,
         String interfaceDescriptionRegex, String configPath) {
      Map<String, Configuration> configs = deserializeConfigurations(configPath);
      Pattern pattern = Pattern.compile(interfaceDescriptionRegex);
      Map<String, Configuration> stubConfigurations = new TreeMap<String, Configuration>();
      _logger.info("\n*** GENERATING STUBS ***\n");
      resetTimer();
      // load old node-roles to be updated at end
      RoleSet stubRoles = new RoleSet();
      stubRoles.add(STUB_ROLE);
      File nodeRolesPath = new File(_settings.getNodeRolesPath());
      _logger.info("Deserializing old node-roles mappings: \"" + nodeRolesPath + "\" ...");
      NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(nodeRolesPath);
      _logger.info("OK\n");
      // create origination policy common to all stubs
      String stubOriginationPolicyName = "~STUB_ORIGINATION_POLICY~";
      PolicyMap stubOriginationPolicy = new PolicyMap(stubOriginationPolicyName);
      PolicyMapClause clause = new PolicyMapClause();
      stubOriginationPolicy.getClauses().add(clause);
      String stubOriginationRouteFilterListName = "~STUB_ORIGINATION_ROUTE_FILTER~";
      RouteFilterList rf = new RouteFilterList(stubOriginationRouteFilterListName);
      RouteFilterLine rfl = new RouteFilterLine(LineAction.ACCEPT, Prefix.ZERO, new SubRange(0, 0));
      rf.addLine(rfl);
      PolicyMapMatchRouteFilterListLine matchLine = new PolicyMapMatchRouteFilterListLine(Collections.singleton(rf));
      clause.getMatchLines().add(matchLine);
      clause.setAction(PolicyMapAction.PERMIT);
      // create flow sink interface common to all stubs
      String flowSinkName = "TenGigabitEthernet100/100";
      Interface flowSink = new Interface(flowSinkName);
      flowSink.setPrefix(Prefix.ZERO);
      flowSink.setActive(true);
      flowSink.setBandwidth(10E9d);
      Set<String> skipWarningNodes = new HashSet<String>();
      for (Configuration config : configs.values()) {
         if (!config.getRoles().contains(inputRole)) {
            continue;
         }
         for (BgpNeighbor neighbor : config.getBgpProcess().getNeighbors().values()) {
            if (!neighbor.getRemoteAs().equals(stubAs)) {
               continue;
            }
            Prefix neighborPrefix = neighbor.getPrefix();
            if (neighborPrefix.getPrefixLength() != 32) {
               throw new BatfishException("do not currently handle generating stubs based on dynamic bgp sessions");
            }
            Ip neighborAddress = neighborPrefix.getAddress();
            int edgeAs = neighbor.getLocalAs();
            /*
             * Now that we have the ip address of the stub, we want to find the
             * interface that connects to it. We will extract the hostname for
             * the stub from the description of this interface using the
             * supplied regex.
             */
            boolean found = false;
            for (Interface iface : config.getInterfaces().values()) {
               Prefix prefix = iface.getPrefix();
               if (prefix == null || !prefix.contains(neighborAddress)) {
                  continue;
               }
               // the neighbor address falls within the network assigned to this
               // interface, so now we check the description
               String description = iface.getDescription();
               Matcher matcher = pattern.matcher(description);
               if (matcher.find()) {
                  String hostname = matcher.group(1);
                  if (configs.containsKey(hostname)) {
                     Configuration duplicateConfig = configs.get(hostname);
                     if (!duplicateConfig.getRoles().contains(STUB_ROLE)
                           || duplicateConfig.getRoles().size() != 1) {
                        throw new BatfishException("A non-generated node with hostname: \"" + hostname + "\" already exists in network under analysis");
                     }
                     else {
                        if (!skipWarningNodes.contains(hostname)) {
                           _logger.warn("WARNING: Overwriting previously generated node: \"" + hostname + "\"\n");
                           skipWarningNodes.add(hostname);
                        }
                     }
                  }
                  found = true;
                  Configuration stub = stubConfigurations.get(hostname);
                  // create stub if it doesn't exist yet
                  if (stub == null) {
                     stub = new Configuration(hostname);
                     stubConfigurations.put(hostname, stub);
                     stub.getInterfaces().put(flowSinkName, flowSink);
                     stub.setBgpProcess(new BgpProcess());
                     stub.getPolicyMaps().put(stubOriginationPolicyName, stubOriginationPolicy);
                     stub.getRouteFilterLists().put(stubOriginationRouteFilterListName, rf);
                     stub.setVendor(CiscoVendorConfiguration.VENDOR_NAME);
                     stub.setRoles(stubRoles);
                     nodeRoles.put(hostname, stubRoles);
                  }
                  // create interface on which peering will occur
                  Map<String, Interface> stubInterfaces = stub.getInterfaces();
                  String stubInterfaceName = "TenGigabitEthernet0/" + (stubInterfaces.size() - 1);
                  Interface stubInterface = new Interface(stubInterfaceName);
                  stubInterfaces.put(stubInterfaceName, stubInterface);
                  stubInterface.setPrefix(new Prefix(neighborAddress, prefix.getPrefixLength()));
                  stubInterface.setActive(true);
                  stubInterface.setBandwidth(10E9d);
                  // create neighbor within bgp process
                  BgpNeighbor edgeNeighbor = new BgpNeighbor(prefix);
                  edgeNeighbor.getOriginationPolicies().add(stubOriginationPolicy);
                  edgeNeighbor.setRemoteAs(edgeAs);
                  edgeNeighbor.setLocalAs(stubAs);
                  edgeNeighbor.setSendCommunity(true);
                  edgeNeighbor.setDefaultMetric(0);
                  stub.getBgpProcess().getNeighbors().put(edgeNeighbor.getPrefix(), edgeNeighbor);
                  break;
               }
               else {
                  throw new BatfishException("Unable to derive stub hostname from interface description: \"" + description + "\" using regex: \"" + interfaceDescriptionRegex + "\"");
               }
            }
            if (!found) {
               throw new BatfishException("Could not determine stub hostname corresponding to ip: \"" + neighborAddress.toString() + "\" listed as neighbor on router: \"" + config.getHostname() + "\"");
            }
         }
      }
      // write updated node-roles mappings to disk
      _logger.info("Serializing updated node-roles mappings: \"" + nodeRolesPath + "\" ...");
      serializeObject(nodeRoles, nodeRolesPath);
      _logger.info("OK\n");
      printElapsedTime();
      // write stubs to disk
      serializeIndependentConfigs(stubConfigurations, configPath);
   }

   private void
genMultipathQueries() { _logger.info("\n*** GENERATING MULTIPATH-INCONSISTENCY QUERIES ***\n"); resetTimer(); String mpiQueryBasePath = _settings.getMultipathInconsistencyQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String nodeSetTextPath = nodeSetPath + ".txt"; _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); for (String hostname : nodes) { QuerySynthesizer synth = new MultipathInconsistencyQuerySynthesizer( hostname); String queryText = synth.getQueryText(); String mpiQueryPath = mpiQueryBasePath + "-" + hostname + ".smt2"; _logger.info("Writing query to: \"" + mpiQueryPath + "\"..."); writeFile(mpiQueryPath, queryText); _logger.info("OK\n"); } _logger.info("Writing node lines for next stage..."); StringBuilder sb = new StringBuilder(); for (String node : nodes) { sb.append(node + "\n"); } writeFile(nodeSetTextPath, sb.toString()); _logger.info("OK\n"); printElapsedTime(); } private void genReachableQueries() { _logger.info("\n*** GENERATING REACHABLE QUERIES ***\n"); resetTimer(); String queryBasePath = _settings.getReachableQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String acceptNode = _settings.getAcceptNode(); String blacklistedNode = _settings.getBlacklistNode(); _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); for (String hostname : nodes) { if (hostname.equals(acceptNode) || hostname.equals(blacklistedNode)) { continue; } QuerySynthesizer synth = new ReachableQuerySynthesizer(hostname, acceptNode); String queryText = synth.getQueryText(); String queryPath; queryPath = queryBasePath + "-" + hostname + ".smt2"; _logger.info("Writing query to: \"" + queryPath + "\"..."); writeFile(queryPath, queryText); _logger.info("OK\n"); } printElapsedTime(); } private void genRoleReachabilityQueries() { _logger.info("\n*** GENERATING NODE-TO-ROLE QUERIES ***\n"); resetTimer(); String queryBasePath = _settings.getRoleReachabilityQueryPath(); String nodeSetPath = _settings.getNodeSetPath(); String nodeSetTextPath = nodeSetPath + ".txt"; String roleSetTextPath = _settings.getRoleSetPath(); String nodeRolesPath = _settings.getNodeRolesPath(); String iterationsPath = nodeRolesPath + ".iterations"; _logger.info("Reading node set from : \"" + nodeSetPath + "\"..."); NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath)); _logger.info("OK\n"); _logger.info("Reading node roles from : \"" + nodeRolesPath + "\"..."); NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(new File( nodeRolesPath)); _logger.info("OK\n"); RoleNodeMap roleNodes = nodeRoles.toRoleNodeMap(); for (String hostname : nodes) { for (String role : roleNodes.keySet()) { QuerySynthesizer synth = new RoleReachabilityQuerySynthesizer( hostname, role); String queryText = synth.getQueryText(); String queryPath = queryBasePath + "-" + hostname + "-" + role + ".smt2"; _logger.info("Writing query to: \"" + queryPath + "\"..."); writeFile(queryPath, queryText); _logger.info("OK\n"); } } _logger.info("Writing node lines for next stage..."); StringBuilder sbNodes = new StringBuilder(); for (String node : nodes) { sbNodes.append(node + "\n"); } writeFile(nodeSetTextPath, sbNodes.toString()); _logger.info("OK\n"); StringBuilder sbRoles = new StringBuilder(); _logger.info("Writing role lines for next stage..."); sbRoles = new StringBuilder(); for (String role : roleNodes.keySet()) { 
sbRoles.append(role + "\n");
      }
      writeFile(roleSetTextPath, sbRoles.toString());
      _logger.info("OK\n");
      _logger.info("Writing role-node-role iteration ordering lines for concretizer stage...");
      StringBuilder sbIterations = new StringBuilder();
      for (Entry<String, NodeSet> roleNodeEntry : roleNodes.entrySet()) {
         String transmittingRole = roleNodeEntry.getKey();
         NodeSet transmittingNodes = roleNodeEntry.getValue();
         if (transmittingNodes.size() < 2) {
            continue;
         }
         String[] tNodeArray = transmittingNodes.toArray(new String[] {});
         String masterNode = tNodeArray[0];
         for (int i = 1; i < tNodeArray.length; i++) {
            String slaveNode = tNodeArray[i];
            for (String receivingRole : roleNodes.keySet()) {
               String iterationLine = transmittingRole + ":" + masterNode + ":" + slaveNode + ":" + receivingRole + "\n";
               sbIterations.append(iterationLine);
            }
         }
      }
      writeFile(iterationsPath, sbIterations.toString());
      _logger.info("OK\n");
      printElapsedTime();
   }

   private void genRoleTransitQueries() {
      _logger.info("\n*** GENERATING ROLE-TO-NODE QUERIES ***\n");
      resetTimer();
      String queryBasePath = _settings.getRoleTransitQueryPath();
      String nodeSetPath = _settings.getNodeSetPath();
      String nodeSetTextPath = nodeSetPath + ".txt";
      String roleSetTextPath = _settings.getRoleSetPath();
      String nodeRolesPath = _settings.getNodeRolesPath();
      String roleNodesPath = _settings.getRoleNodesPath();
      String iterationsPath = nodeRolesPath + ".rtiterations";
      String constraintsIterationsPath = nodeRolesPath + ".rtconstraintsiterations";
      _logger.info("Reading node set from : \"" + nodeSetPath + "\"...");
      NodeSet nodes = (NodeSet) deserializeObject(new File(nodeSetPath));
      _logger.info("OK\n");
      _logger.info("Reading node roles from : \"" + nodeRolesPath + "\"...");
      NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(new File(nodeRolesPath));
      _logger.info("OK\n");
      RoleNodeMap roleNodes = nodeRoles.toRoleNodeMap();
      for (Entry<String, NodeSet> sourceEntry : roleNodes.entrySet()) {
         String sourceRole = sourceEntry.getKey();
         for (Entry<String, NodeSet> transitEntry : roleNodes.entrySet()) {
            String transitRole = transitEntry.getKey();
            if (transitRole.equals(sourceRole)) {
               continue;
            }
            NodeSet transitNodes = transitEntry.getValue();
            for (String transitNode : transitNodes) {
               QuerySynthesizer synth = new RoleTransitQuerySynthesizer(sourceRole, transitNode);
               String queryText = synth.getQueryText();
               String queryPath = queryBasePath + "-" + transitNode + "-" + sourceRole + ".smt2";
               _logger.info("Writing query to: \"" + queryPath + "\"...");
               writeFile(queryPath, queryText);
               _logger.info("OK\n");
            }
         }
      }
      _logger.info("Writing node lines for next stage...");
      StringBuilder sbNodes = new StringBuilder();
      for (String node : nodes) {
         sbNodes.append(node + "\n");
      }
      writeFile(nodeSetTextPath, sbNodes.toString());
      _logger.info("OK\n");
      _logger.info("Writing role lines for next stage...");
      StringBuilder sbRoles = new StringBuilder();
      for (String role : roleNodes.keySet()) {
         sbRoles.append(role + "\n");
      }
      writeFile(roleSetTextPath, sbRoles.toString());
      _logger.info("OK\n");
      // not actually sure if this is necessary
      _logger.info("Writing role-node mappings for concretizer stage...");
      StringBuilder sbRoleNodes = new StringBuilder();
      for (Entry<String, NodeSet> e : roleNodes.entrySet()) {
         String role = e.getKey();
         NodeSet currentNodes = e.getValue();
         sbRoleNodes.append(role + ":");
         for (String node : currentNodes) {
            sbRoleNodes.append(node + ",");
         }
         // terminate the mapping line; the old code re-appended the role
         // here, which looks like a copy-paste slip
         sbRoleNodes.append("\n");
      }
      writeFile(roleNodesPath, sbRoleNodes.toString());
      _logger.info("Writing transitrole-transitnode-sourcerole iteration ordering lines for constraints stage...");
      StringBuilder sbConstraintsIterations = new StringBuilder();
      for (Entry<String, NodeSet> roleNodeEntry : roleNodes.entrySet()) {
         String transitRole = roleNodeEntry.getKey();
         NodeSet transitNodes = roleNodeEntry.getValue();
         if (transitNodes.size() < 2) {
            continue;
         }
         for (String sourceRole : roleNodes.keySet()) {
            if (sourceRole.equals(transitRole)) {
               continue;
            }
            for (String transitNode : transitNodes) {
               String iterationLine = transitRole + ":" + transitNode + ":" + sourceRole + "\n";
               sbConstraintsIterations.append(iterationLine);
            }
         }
      }
      writeFile(constraintsIterationsPath, sbConstraintsIterations.toString());
      _logger.info("OK\n");
      _logger.info("Writing transitrole-master-slave-sourcerole iteration ordering lines for concretizer stage...");
      StringBuilder sbIterations = new StringBuilder();
      for (Entry<String, NodeSet> roleNodeEntry : roleNodes.entrySet()) {
         String transitRole = roleNodeEntry.getKey();
         NodeSet transitNodes = roleNodeEntry.getValue();
         if (transitNodes.size() < 2) {
            continue;
         }
         String[] tNodeArray = transitNodes.toArray(new String[] {});
         String masterNode = tNodeArray[0];
         for (int i = 1; i < tNodeArray.length; i++) {
            String slaveNode = tNodeArray[i];
            for (String sourceRole : roleNodes.keySet()) {
               if (sourceRole.equals(transitRole)) {
                  continue;
               }
               String iterationLine = transitRole + ":" + masterNode + ":" + slaveNode + ":" + sourceRole + "\n";
               sbIterations.append(iterationLine);
            }
         }
      }
      writeFile(iterationsPath, sbIterations.toString());
      _logger.info("OK\n");
      printElapsedTime();
   }
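// The helper files written by the two methods above use simple
// line-oriented formats consumed by later pipeline stages (node and role
// names below are hypothetical):
//
//    role-nodes mapping (one line per role):   border:as1border1,as1border2,
//    concretizer iteration ordering:           border:as1border1:as1border2:core
//
// i.e. colon-separated fields, with node lists comma-separated (and
// comma-terminated) exactly as the loops above emit them.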
   private void genZ3(Map<String, Configuration> configurations) {
      _logger.info("\n*** GENERATING Z3 LOGIC ***\n");
      resetTimer();
      Path flowSinkSetPath = Paths.get(_settings.getDataPlaneDir(), FLOW_SINKS_FILENAME);
      Path fibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_FILENAME);
      Path prFibsPath = Paths.get(_settings.getDataPlaneDir(), FIBS_POLICY_ROUTE_NEXT_HOP_FILENAME);
      Path edgesPath = Paths.get(_settings.getDataPlaneDir(), EDGES_FILENAME);
      _logger.info("Deserializing flow sink interface set: \"" + flowSinkSetPath.toString() + "\"...");
      FlowSinkSet flowSinks = (FlowSinkSet) deserializeObject(flowSinkSetPath.toFile());
      _logger.info("OK\n");
      _logger.info("Deserializing destination route fibs: \"" + fibsPath.toString() + "\"...");
      FibMap fibs = (FibMap) deserializeObject(fibsPath.toFile());
      _logger.info("OK\n");
      _logger.info("Deserializing policy route fibs: \"" + prFibsPath.toString() + "\"...");
      PolicyRouteFibNodeMap prFibs = (PolicyRouteFibNodeMap) deserializeObject(prFibsPath.toFile());
      _logger.info("OK\n");
      _logger.info("Deserializing topology edges: \"" + edgesPath.toString() + "\"...");
      EdgeSet topologyEdges = (EdgeSet) deserializeObject(edgesPath.toFile());
      _logger.info("OK\n");
      _logger.info("Synthesizing Z3 logic...");
      Synthesizer s = new Synthesizer(configurations, fibs, prFibs, topologyEdges,
            _settings.getSimplify(), flowSinks);
      String result = s.synthesize();
      List<String> warnings = s.getWarnings();
      int numWarnings = warnings.size();
      if (numWarnings == 0) {
         _logger.info("OK\n");
      }
      else {
         for (String warning : warnings) {
            _logger.warn(warning);
         }
      }
      String outputPath = _settings.getZ3File();
      _logger.info("Writing Z3 logic: \"" + outputPath + "\"...");
      File z3Out = new File(outputPath);
      z3Out.delete();
      writeFile(outputPath, result);
      _logger.info("OK\n");
      String nodeSetPath = _settings.getNodeSetPath();
      _logger.info("Serializing node set: \"" + nodeSetPath + "\"...");
      NodeSet nodeSet = s.getNodeSet();
      serializeObject(nodeSet, new File(nodeSetPath));
      _logger.info("OK\n");
      printElapsedTime();
   }
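// genZ3 writes a single file of synthesized logic, and the gen*Queries
// methods write per-node queries with an .smt2 suffix, so the intended
// consumer is presumably an external z3 run over those files. The NodeSet
// serialized here is exactly what genMultipathQueries, genBlackHoleQueries,
// and the role-query generators later deserialize from
// _settings.getNodeSetPath().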
"\"..."); NodeSet nodeSet = s.getNodeSet(); serializeObject(nodeSet, new File(nodeSetPath)); _logger.info("OK\n"); printElapsedTime(); } public Map<String, Configuration> getConfigurations( String serializedVendorConfigPath) { Map<String, VendorConfiguration> vendorConfigurations = deserializeVendorConfigurations(serializedVendorConfigPath); Map<String, Configuration> configurations = convertConfigurations(vendorConfigurations); return configurations; } private double getElapsedTime(long beforeTime) { long difference = System.currentTimeMillis() - beforeTime; double seconds = difference / 1000d; return seconds; } private FlowSinkSet getFlowSinkSet(LogicBloxFrontend lbFrontend) { FlowSinkSet flowSinks = new FlowSinkSet(); String qualifiedName = _predicateInfo.getPredicateNames().get( FLOW_SINK_PREDICATE_NAME); Relation flowSinkRelation = lbFrontend.queryPredicate(qualifiedName); List<String> nodes = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nodes, flowSinkRelation.getColumns().get(0)); List<String> interfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, interfaces, flowSinkRelation.getColumns().get(1)); for (int i = 0; i < nodes.size(); i++) { String node = nodes.get(i); String iface = interfaces.get(i); FlowSinkInterface f = new FlowSinkInterface(node, iface); flowSinks.add(f); } return flowSinks; } private List<String> getHelpPredicates(Map<String, String> predicateSemantics) { Set<String> helpPredicateSet = new LinkedHashSet<String>(); _settings.getHelpPredicates(); if (_settings.getHelpPredicates() == null) { helpPredicateSet.addAll(predicateSemantics.keySet()); } else { helpPredicateSet.addAll(_settings.getHelpPredicates()); } List<String> helpPredicates = new ArrayList<String>(); helpPredicates.addAll(helpPredicateSet); Collections.sort(helpPredicates); return helpPredicates; } private PolicyRouteFibNodeMap getPolicyRouteFibNodeMap( Relation fibPolicyRouteNextHops, LogicBloxFrontend lbFrontend) { PolicyRouteFibNodeMap nodeMap = new PolicyRouteFibNodeMap(); List<String> nodeList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nodeList, fibPolicyRouteNextHops.getColumns().get(0)); List<String> ipList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_IP, ipList, fibPolicyRouteNextHops.getColumns().get(1)); List<String> outInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, outInterfaces, fibPolicyRouteNextHops.getColumns().get(2)); List<String> inNodes = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, inNodes, fibPolicyRouteNextHops.getColumns().get(3)); List<String> inInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, inInterfaces, fibPolicyRouteNextHops.getColumns().get(4)); int size = nodeList.size(); for (int i = 0; i < size; i++) { String nodeOut = nodeList.get(i); String nodeIn = inNodes.get(i); Ip ip = new Ip(ipList.get(i)); String ifaceOut = outInterfaces.get(i); String ifaceIn = inInterfaces.get(i); PolicyRouteFibIpMap ipMap = nodeMap.get(nodeOut); if (ipMap == null) { ipMap = new PolicyRouteFibIpMap(); nodeMap.put(nodeOut, ipMap); } EdgeSet edges = ipMap.get(ip); if (edges == null) { edges = new EdgeSet(); ipMap.put(ip, edges); } Edge newEdge = new Edge(nodeOut, ifaceOut, nodeIn, ifaceIn); edges.add(newEdge); } return nodeMap; } public PredicateInfo getPredicateInfo(Map<String, String> logicFiles) { // Get predicate semantics from rules file 
_logger.info("\n*** PARSING PREDICATE INFO ***\n"); resetTimer(); String predicateInfoPath = getPredicateInfoPath(); PredicateInfo predicateInfo = (PredicateInfo) deserializeObject(new File( predicateInfoPath)); printElapsedTime(); return predicateInfo; } private String getPredicateInfoPath() { File logicDir = retrieveLogicDir(); return Paths.get(logicDir.toString(), PREDICATE_INFO_FILENAME).toString(); } private FibMap getRouteForwardingRules(Relation fibNetworkForward, LogicBloxFrontend lbFrontend) { FibMap fibs = new FibMap(); List<String> nameList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nameList, fibNetworkForward.getColumns().get(0)); List<String> networkList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_INDEX_NETWORK, networkList, fibNetworkForward.getColumns().get(1)); List<String> interfaceList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, interfaceList, fibNetworkForward.getColumns().get(2)); List<String> nextHopList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nextHopList, fibNetworkForward.getColumns().get(3)); List<String> nextHopIntList = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, nextHopIntList, fibNetworkForward.getColumns().get(4)); String currentHostname = ""; Map<String, Integer> startIndices = new HashMap<String, Integer>(); Map<String, Integer> endIndices = new HashMap<String, Integer>(); for (int i = 0; i < nameList.size(); i++) { String currentRowHostname = nameList.get(i); if (!currentHostname.equals(currentRowHostname)) { if (i > 0) { endIndices.put(currentHostname, i - 1); } currentHostname = currentRowHostname; startIndices.put(currentHostname, i); } } endIndices.put(currentHostname, nameList.size() - 1); for (String hostname : startIndices.keySet()) { FibSet fibRows = new FibSet(); fibs.put(hostname, fibRows); int startIndex = startIndices.get(hostname); int endIndex = endIndices.get(hostname); for (int i = startIndex; i <= endIndex; i++) { String networkStr = networkList.get(i); Prefix prefix = new Prefix(networkStr); String iface = interfaceList.get(i); String nextHop = nextHopList.get(i); String nextHopInt = nextHopIntList.get(i); fibRows.add(new FibRow(prefix, iface, nextHop, nextHopInt)); } } return fibs; } private Map<String, String> getSemanticsFiles() { final Map<String, String> semanticsFiles = new HashMap<String, String>(); File logicDirFile = retrieveLogicDir(); FileVisitor<Path> visitor = new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String pathString = file.toString(); if (pathString.endsWith(".semantics")) { String contents = FileUtils.readFileToString(file.toFile()); semanticsFiles.put(pathString, contents); } return super.visitFile(file, attrs); } }; try { Files.walkFileTree(Paths.get(logicDirFile.getAbsolutePath()), visitor); } catch (IOException e) { e.printStackTrace(); } cleanupLogicDir(); return semanticsFiles; } public EdgeSet getTopologyEdges(LogicBloxFrontend lbFrontend) { EdgeSet edges = new EdgeSet(); String qualifiedName = _predicateInfo.getPredicateNames().get( TOPOLOGY_PREDICATE_NAME); Relation topologyRelation = lbFrontend.queryPredicate(qualifiedName); List<String> fromRouters = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, fromRouters, topologyRelation.getColumns().get(0)); List<String> fromInterfaces = new ArrayList<String>(); 
lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, fromInterfaces, topologyRelation.getColumns().get(1)); List<String> toRouters = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, toRouters, topologyRelation.getColumns().get(2)); List<String> toInterfaces = new ArrayList<String>(); lbFrontend.fillColumn(LBValueType.ENTITY_REF_STRING, toInterfaces, topologyRelation.getColumns().get(3)); for (int i = 0; i < fromRouters.size(); i++) { if (Util.isLoopback(fromInterfaces.get(i)) || Util.isLoopback(toInterfaces.get(i))) { continue; } Edge newEdge = new Edge(fromRouters.get(i), fromInterfaces.get(i), toRouters.get(i), toInterfaces.get(i)); edges.add(newEdge); } return edges; } private void histogram(String testRigPath) { Map<File, String> configurationData = readConfigurationFiles(testRigPath); Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData); _logger.info("Building feature histogram..."); MultiSet<String> histogram = new TreeMultiSet<String>(); for (VendorConfiguration vc : vendorConfigurations.values()) { Set<String> unimplementedFeatures = vc.getUnimplementedFeatures(); histogram.add(unimplementedFeatures); } _logger.info("OK\n"); for (String feature : histogram.elements()) { int count = histogram.count(feature); _logger.output(feature + ": " + count + "\n"); } } private ConfigurationFormat identifyConfigurationFormat(String fileText) { char firstChar = fileText.trim().charAt(0); if (firstChar == '!') { if (fileText.contains("set prompt")) { return ConfigurationFormat.VXWORKS; } else { return ConfigurationFormat.CISCO; } } else if (fileText.contains("set hostname")) { return ConfigurationFormat.JUNIPER_SWITCH; } else if (firstChar == '#') { if (fileText.contains("set version")) { return ConfigurationFormat.FLAT_JUNIPER; } else { return ConfigurationFormat.JUNIPER; } } else { return ConfigurationFormat.UNKNOWN; } } public LogicBloxFrontend initFrontend(boolean assumedToExist, String workspace) throws LBInitializationException { _logger.info("\n*** STARTING CONNECTBLOX SESSION ***\n"); resetTimer(); LogicBloxFrontend lbFrontend = new LogicBloxFrontend( _settings.getConnectBloxHost(), _settings.getConnectBloxPort(), _settings.getLbWebPort(), _settings.getLbWebAdminPort(), workspace, assumedToExist, _logger); lbFrontend.initialize(); if (!lbFrontend.connected()) { throw new BatfishException( "Error connecting to ConnectBlox service. 
Please make sure service is running and try again."); } _logger.info("SUCCESS\n"); printElapsedTime(); _lbFrontends.add(lbFrontend); return lbFrontend; } private boolean isJavaSerializationData(File inputFile) { try (FileInputStream i = new FileInputStream(inputFile)) { int headerLength = JAVA_SERIALIZED_OBJECT_HEADER.length; byte[] headerBytes = new byte[headerLength]; int result = i.read(headerBytes, 0, headerLength); if (result != headerLength) { throw new BatfishException("Read wrong number of bytes"); } return Arrays.equals(headerBytes, JAVA_SERIALIZED_OBJECT_HEADER); } catch (IOException e) { throw new BatfishException("Could not read header from file: " + inputFile.toString(), e); } } private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser) { ParserRuleContext tree; try { tree = parser.parse(); } catch (BatfishException e) { throw new ParserBatfishException("Parser error", e); } List<String> errors = parser.getErrors(); int numErrors = errors.size(); if (numErrors > 0) { _logger.error(numErrors + " ERROR(S)\n"); for (int i = 0; i < numErrors; i++) { String prefix = "ERROR " + (i + 1) + ": "; String msg = errors.get(i); String prefixedMsg = Util.applyPrefix(prefix, msg); _logger.error(prefixedMsg + "\n"); } throw new ParserBatfishException("Parser error(s)"); } else if (!_settings.printParseTree()) { _logger.info("OK\n"); } else { _logger.info("OK, PRINTING PARSE TREE:\n"); _logger.info(ParseTreePrettyPrinter.print(tree, parser) + "\n\n"); } return tree; } private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser, String filename) { _logger.info("Parsing: \"" + filename + "\"..."); return parse(parser); } private void parseFlowsFromConstraints(StringBuilder sb, RoleNodeMap roleNodes) { Path flowConstraintsDir = Paths.get(_settings.getFlowPath()); File[] constraintsFiles = flowConstraintsDir.toFile().listFiles( new FilenameFilter() { @Override public boolean accept(File dir, String filename) { return filename.matches(".*-concrete-.*.smt2.out"); } }); if (constraintsFiles == null) { throw new BatfishException("Error reading flow constraints directory"); } for (File constraintsFile : constraintsFiles) { String flowConstraintsText = readFile(constraintsFile); ConcretizerQueryResultCombinedParser parser = new ConcretizerQueryResultCombinedParser( flowConstraintsText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); ParserRuleContext tree = parse(parser, constraintsFile.toString()); ParseTreeWalker walker = new ParseTreeWalker(); ConcretizerQueryResultExtractor extractor = new ConcretizerQueryResultExtractor(); walker.walk(extractor, tree); String id = extractor.getId(); if (id == null) { continue; } Map<String, Long> constraints = extractor.getConstraints(); long src_ip = 0; long dst_ip = 0; long src_port = 0; long dst_port = 0; long protocol = IpProtocol.IP.number(); for (String varName : constraints.keySet()) { Long value = constraints.get(varName); switch (varName) { case Synthesizer.SRC_IP_VAR: src_ip = value; break; case Synthesizer.DST_IP_VAR: dst_ip = value; break; case Synthesizer.SRC_PORT_VAR: src_port = value; break; case Synthesizer.DST_PORT_VAR: dst_port = value; break; case Synthesizer.IP_PROTOCOL_VAR: protocol = value; break; default: throw new Error("invalid variable name"); } } // TODO: cleanup dirty hack if (roleNodes != null) { // id is role NodeSet nodes = roleNodes.get(id); for (String node : nodes) { String line = node + "|" + src_ip + "|" + dst_ip + "|" + src_port + "|" + dst_port + "|" + protocol + "\n"; sb.append(line); } } 
else { String node = id; String line = node + "|" + src_ip + "|" + dst_ip + "|" + src_port + "|" + dst_port + "|" + protocol + "\n"; sb.append(line); } } } private NodeRoleMap parseNodeRoles(String testRigPath) { Path rolePath = Paths.get(testRigPath, "node_roles"); String roleFileText = readFile(rolePath.toFile()); _logger.info("Parsing: \"" + rolePath.toAbsolutePath().toString() + "\""); BatfishCombinedParser<?, ?> parser = new RoleCombinedParser(roleFileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); RoleExtractor extractor = new RoleExtractor(); ParserRuleContext tree = parse(parser); ParseTreeWalker walker = new ParseTreeWalker(); walker.walk(extractor, tree); NodeRoleMap nodeRoles = extractor.getRoleMap(); return nodeRoles; } private Topology parseTopology(File topologyFilePath) { _logger.info("*** PARSING TOPOLOGY ***\n"); resetTimer(); String topologyFileText = readFile(topologyFilePath); BatfishCombinedParser<?, ?> parser = null; TopologyExtractor extractor = null; _logger.info("Parsing: \"" + topologyFilePath.getAbsolutePath().toString() + "\""); if (topologyFileText.startsWith("autostart")) { parser = new GNS3TopologyCombinedParser(topologyFileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); extractor = new GNS3TopologyExtractor(); } else if (topologyFileText.startsWith("CONFIGPARSER_TOPOLOGY")) { parser = new BatfishTopologyCombinedParser(topologyFileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); extractor = new BatfishTopologyExtractor(); } else if (topologyFileText.equals("")) { throw new BatfishException("...ERROR: empty topology\n"); } else { _logger.fatal("...ERROR\n"); throw new BatfishException("Topology format error"); } ParserRuleContext tree = parse(parser); ParseTreeWalker walker = new ParseTreeWalker(); walker.walk(extractor, tree); Topology topology = extractor.getTopology(); printElapsedTime(); return topology; } private Map<String, VendorConfiguration> parseVendorConfigurations( Map<File, String> configurationData) { _logger.info("\n*** PARSING VENDOR CONFIGURATION FILES ***\n"); resetTimer(); Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>(); boolean processingError = false; for (File currentFile : configurationData.keySet()) { String fileText = configurationData.get(currentFile); String currentPath = currentFile.getAbsolutePath(); VendorConfiguration vc = null; if (fileText.length() == 0) { continue; } BatfishCombinedParser<?, ?> combinedParser = null; ParserRuleContext tree = null; ControlPlaneExtractor extractor = null; Warnings warnings = new Warnings(_settings.getPedanticAsError(), _settings.getPedanticRecord() && _logger.isActive(BatfishLogger.LEVEL_PEDANTIC), _settings.getRedFlagAsError(), _settings.getRedFlagRecord() && _logger.isActive(BatfishLogger.LEVEL_REDFLAG), _settings.getUnimplementedAsError(), _settings.getUnimplementedRecord() && _logger.isActive(BatfishLogger.LEVEL_UNIMPLEMENTED), _settings.printParseTree()); ConfigurationFormat format = identifyConfigurationFormat(fileText); switch (format) { case ARISTA: case CISCO: CiscoCombinedParser ciscoParser = new CiscoCombinedParser(fileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); combinedParser = ciscoParser; extractor = new CiscoControlPlaneExtractor(fileText, ciscoParser, warnings); break; case JUNIPER: if (_settings.flattenOnTheFly()) { _logger .warn("Flattening: \"" + currentPath + "\" on-the-fly; line-numbers reported for this file 
will be spurious\n"); fileText = flatten(fileText); } else { throw new BatfishException( "Juniper configurations must be flattened prior to this stage"); } // MISSING BREAK IS INTENTIONAL case FLAT_JUNIPER: FlatJuniperCombinedParser flatJuniperParser = new FlatJuniperCombinedParser( fileText, _settings.getThrowOnParserError(), _settings.getThrowOnLexerError()); combinedParser = flatJuniperParser; extractor = new FlatJuniperControlPlaneExtractor(fileText, flatJuniperParser, warnings); break; case JUNIPER_SWITCH: case VXWORKS: String unsupportedError = "Unsupported configuration format: \"" + format.toString() + "\" for file: \"" + currentPath + "\"\n"; if (!_settings.ignoreUnsupported() && _settings.exitOnParseError()) { throw new BatfishException(unsupportedError); } else if (!_settings.ignoreUnsupported()) { processingError = true; _logger.error(unsupportedError); } else { _logger.warn(unsupportedError); } continue; case UNKNOWN: default: String unknownError = "Unknown configuration format for file: \"" + currentPath + "\"\n"; if (_settings.exitOnParseError()) { throw new BatfishException(unknownError); } else { _logger.error(unknownError); processingError = true; continue; } } try { tree = parse(combinedParser, currentPath); _logger.info("\tPost-processing..."); extractor.processParseTree(tree); _logger.info("OK\n"); } catch (ParserBatfishException e) { String error = "Error parsing configuration file: \"" + currentPath + "\""; if (_settings.exitOnParseError()) { throw new BatfishException(error, e); } else { _logger.error(error + ":\n"); _logger.error(ExceptionUtils.getStackTrace(e)); processingError = true; continue; } } catch (Exception e) { String error = "Error post-processing parse tree of configuration file: \"" + currentPath + "\""; if (_settings.exitOnParseError()) { throw new BatfishException(error, e); } else { _logger.error(error + ":\n"); _logger.error(ExceptionUtils.getStackTrace(e)); processingError = true; continue; } } finally { for (String warning : warnings.getRedFlagWarnings()) { _logger.redflag(warning); } for (String warning : warnings.getUnimplementedWarnings()) { _logger.unimplemented(warning); } for (String warning : warnings.getPedanticWarnings()) { _logger.pedantic(warning); } } vc = extractor.getVendorConfiguration(); // at this point we should have a VendorConfiguration vc String hostname = vc.getHostname(); if (hostname == null) { String error = "No hostname set in file: \"" + currentFile + "\"\n"; if (_settings.exitOnParseError()) { throw new BatfishException(error); } else { _logger.error(error); processingError = true; continue; } } if (vendorConfigurations.containsKey(hostname)) { String error = "Duplicate hostname \"" + vc.getHostname() + "\" found in " + currentFile + "\n"; if (_settings.exitOnParseError()) { throw new BatfishException(error); } else { _logger.error(error); processingError = true; continue; } } vendorConfigurations.put(vc.getHostname(), vc); } if (processingError) { return null; } else { printElapsedTime(); return vendorConfigurations; } } private void populateConfigurationFactBins( Collection<Configuration> configurations, Map<String, StringBuilder> factBins) { _logger .info("\n*** EXTRACTING LOGICBLOX FACTS FROM CONFIGURATIONS ***\n"); resetTimer(); Set<Long> communities = new LinkedHashSet<Long>(); for (Configuration c : configurations) { communities.addAll(c.getCommunities()); } for (Configuration c : configurations) { ConfigurationFactExtractor cfe = new ConfigurationFactExtractor(c, communities, factBins); cfe.writeFacts(); for 
(String warning : cfe.getWarnings()) { _logger.warn(warning); } } printElapsedTime(); } private void postFacts(LogicBloxFrontend lbFrontend, Map<String, StringBuilder> factBins) { _logger.info("\n*** POSTING FACTS TO BLOXWEB SERVICES ***\n"); resetTimer(); _logger.info("Starting bloxweb services..."); lbFrontend.startLbWebServices(); _logger.info("OK\n"); _logger.info("Posting facts..."); try { lbFrontend.postFacts(factBins); } catch (ServiceClientException e) { throw new BatfishException("Failed to post facts to bloxweb services", e); } _logger.info("OK\n"); _logger.info("Stopping bloxweb services..."); lbFrontend.stopLbWebServices(); _logger.info("OK\n"); _logger.info("SUCCESS\n"); printElapsedTime(); } private void printAllPredicateSemantics( Map<String, String> predicateSemantics) { // Get predicate semantics from rules file _logger.info("\n*** PRINTING PREDICATE SEMANTICS ***\n"); List<String> helpPredicates = getHelpPredicates(predicateSemantics); for (String predicate : helpPredicates) { printPredicateSemantics(predicate); _logger.info("\n"); } } private void printElapsedTime() { double seconds = getElapsedTime(_timerCount); _logger.info("Time taken for this task: " + seconds + " seconds\n"); } private void printPredicate(LogicBloxFrontend lbFrontend, String predicateName) { List<String> output; printPredicateSemantics(predicateName); String qualifiedName = _predicateInfo.getPredicateNames().get( predicateName); if (qualifiedName == null) { // predicate not found _logger.error("ERROR: No information for predicate: " + predicateName + "\n"); return; } Relation relation = lbFrontend.queryPredicate(qualifiedName); try { output = lbFrontend.getPredicate(_predicateInfo, relation, predicateName); for (String match : output) { _logger.output(match + "\n"); } } catch (QueryException q) { _logger.fatal(q.getMessage() + "\n"); } } private void printPredicateCount(LogicBloxFrontend lbFrontend, String predicateName) { int numRows = lbFrontend.queryPredicate(predicateName).getColumns() .get(0).size(); String output = "|" + predicateName + "| = " + numRows + "\n"; _logger.info(output); } public void printPredicateCounts(LogicBloxFrontend lbFrontend, Set<String> predicateNames) { // Print predicate(s) here _logger.info("\n*** SUBMITTING QUERY(IES) ***\n"); resetTimer(); for (String predicateName : predicateNames) { printPredicateCount(lbFrontend, predicateName); // _logger.info("\n"); } printElapsedTime(); } public void printPredicates(LogicBloxFrontend lbFrontend, Set<String> predicateNames) { // Print predicate(s) here _logger.info("\n*** SUBMITTING QUERY(IES) ***\n"); resetTimer(); for (String predicateName : predicateNames) { printPredicate(lbFrontend, predicateName); } printElapsedTime(); } private void printPredicateSemantics(String predicateName) { String semantics = _predicateInfo.getPredicateSemantics(predicateName); if (semantics == null) { semantics = "<missing>"; } _logger.info("\n"); _logger.info("Predicate: " + predicateName + "\n"); _logger.info("Semantics: " + semantics + "\n"); } private void processTopology(File topologyFilePath, Map<String, StringBuilder> factBins) { Topology topology = null; topology = parseTopology(topologyFilePath); TopologyFactExtractor tfe = new TopologyFactExtractor(topology); tfe.writeFacts(factBins); } private Map<File, String> readConfigurationFiles(String testRigPath) { _logger.info("\n*** READING CONFIGURATION FILES ***\n"); resetTimer(); Map<File, String> configurationData = new TreeMap<File, String>(); File configsPath = Paths.get(testRigPath, 
"configs").toFile(); File[] configFilePaths = configsPath.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return !name.startsWith("."); } }); if (configFilePaths == null) { throw new BatfishException("Error reading test rig configs directory"); } for (File file : configFilePaths) { _logger.debug("Reading: \"" + file.toString() + "\"\n"); String fileText = readFile(file.getAbsoluteFile()) + "\n"; configurationData.put(file, fileText); } printElapsedTime(); return configurationData; } public String readFile(File file) { String text = null; try { text = FileUtils.readFileToString(file); } catch (IOException e) { throw new BatfishException("Failed to read file: " + file.toString(), e); } return text; } private void resetTimer() { _timerCount = System.currentTimeMillis(); } private File retrieveLogicDir() { File logicDirFile = null; final String locatorFilename = LogicResourceLocator.class.getSimpleName() + ".class"; URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain() .getCodeSource().getLocation(); String logicSourceString = logicSourceURL.toString(); UrlZipExplorer zip = null; StringFilter lbFilter = new StringFilter() { @Override public boolean accept(String filename) { return filename.endsWith(".lbb") || filename.endsWith(".lbp") || filename.endsWith(".semantics") || filename.endsWith(locatorFilename) || filename.endsWith(PREDICATE_INFO_FILENAME); } }; if (logicSourceString.startsWith("onejar:")) { FileVisitor<Path> visitor = null; try { zip = new UrlZipExplorer(logicSourceURL); Path destinationDir = Files.createTempDirectory("lbtmpproject"); File destinationDirAsFile = destinationDir.toFile(); zip.extractFiles(lbFilter, destinationDirAsFile); visitor = new SimpleFileVisitor<Path>() { private String _projectDirectory; @Override public String toString() { return _projectDirectory; } @Override public FileVisitResult visitFile(Path aFile, BasicFileAttributes aAttrs) throws IOException { if (aFile.endsWith(locatorFilename)) { _projectDirectory = aFile.getParent().toString(); return FileVisitResult.TERMINATE; } return FileVisitResult.CONTINUE; } }; Files.walkFileTree(destinationDir, visitor); _tmpLogicDir = destinationDirAsFile; } catch (IOException e) { throw new BatfishException( "Failed to retrieve logic dir from onejar archive", e); } String fileString = visitor.toString(); return new File(fileString); } else { String logicPackageResourceName = LogicResourceLocator.class .getPackage().getName().replace('.', SEPARATOR.charAt(0)); try { logicDirFile = new File(LogicResourceLocator.class.getClassLoader() .getResource(logicPackageResourceName).toURI()); } catch (URISyntaxException e) { throw new BatfishException("Failed to resolve logic directory", e); } return logicDirFile; } } private void revert(LogicBloxFrontend lbFrontend) { _logger.info("\n*** REVERTING WORKSPACE ***\n"); String workspaceName = new File(_settings.getTestRigPath()).getName(); String branchName = _settings.getBranchName(); _logger.debug("Reverting workspace: \"" + workspaceName + "\" to branch: \"" + branchName + "\n"); String errorResult = lbFrontend.revertDatabase(branchName); if (errorResult != null) { throw new BatfishException("Failed to revert database: " + errorResult); } } public void run() { if (_settings.getBuildPredicateInfo()) { buildPredicateInfo(); return; } if (_settings.getHistogram()) { histogram(_settings.getTestRigPath()); return; } if (_settings.getGenerateOspfTopologyPath() != null) { generateOspfConfigs(_settings.getGenerateOspfTopologyPath(), 
_settings.getSerializeIndependentPath()); return; } if (_settings.getFlatten()) { String flattenSource = Paths.get(_settings.getFlattenSource(), "configs").toString(); String flattenDestination = Paths.get( _settings.getFlattenDestination(), "configs").toString(); flatten(flattenSource, flattenDestination); return; } if (_settings.getGenerateStubs()) { String configPath = _settings.getSerializeIndependentPath(); String inputRole = _settings.getGenerateStubsInputRole(); String interfaceDescriptionRegex = _settings .getGenerateStubsInterfaceDescriptionRegex(); int stubAs = _settings.getGenerateStubsRemoteAs(); generateStubs(inputRole, stubAs, interfaceDescriptionRegex, configPath); return; } if (_settings.getZ3()) { Map<String, Configuration> configurations = deserializeConfigurations(_settings .getSerializeIndependentPath()); genZ3(configurations); return; } if (_settings.getAnonymize()) { anonymizeConfigurations(); return; } if (_settings.getInterfaceFailureInconsistencyReachableQuery()) { genReachableQueries(); return; } if (_settings.getRoleReachabilityQuery()) { genRoleReachabilityQueries(); return; } if (_settings.getRoleTransitQuery()) { genRoleTransitQueries(); return; } if (_settings.getInterfaceFailureInconsistencyBlackHoleQuery()) { genBlackHoleQueries(); return; } if (_settings.getGenerateMultipathInconsistencyQuery()) { genMultipathQueries(); return; } if (_settings.getSerializeVendor()) { String testRigPath = _settings.getTestRigPath(); String outputPath = _settings.getSerializeVendorPath(); serializeVendorConfigs(testRigPath, outputPath); return; } if (_settings.dumpInterfaceDescriptions()) { String testRigPath = _settings.getTestRigPath(); String outputPath = _settings.getDumpInterfaceDescriptionsPath(); dumpInterfaceDescriptions(testRigPath, outputPath); return; } if (_settings.getSerializeIndependent()) { String inputPath = _settings.getSerializeVendorPath(); String outputPath = _settings.getSerializeIndependentPath(); serializeIndependentConfigs(inputPath, outputPath); return; } if (_settings.getConcretize()) { concretize(); return; } if (_settings.getQuery() || _settings.getPrintSemantics() || _settings.getDataPlane()) { Map<String, String> logicFiles = getSemanticsFiles(); _predicateInfo = getPredicateInfo(logicFiles); // Print predicate semantics and quit if requested if (_settings.getPrintSemantics()) { printAllPredicateSemantics(_predicateInfo.getPredicateSemantics()); return; } } Map<String, StringBuilder> cpFactBins = null; if (_settings.getFacts() || _settings.getDumpControlPlaneFacts()) { cpFactBins = new LinkedHashMap<String, StringBuilder>(); initControlPlaneFactBins(cpFactBins); Map<String, Configuration> configurations = deserializeConfigurations(_settings .getSerializeIndependentPath()); writeTopologyFacts(_settings.getTestRigPath(), configurations, cpFactBins); writeConfigurationFacts(configurations, cpFactBins); String flowSinkPath = _settings.getFlowSinkPath(); if (flowSinkPath != null) { FlowSinkSet flowSinks = (FlowSinkSet) deserializeObject(new File( flowSinkPath)); writeFlowSinkFacts(flowSinks, cpFactBins); } if (_settings.getDumpControlPlaneFacts()) { dumpFacts(cpFactBins); } if (!(_settings.getFacts() || _settings.createWorkspace())) { return; } } // Start frontend LogicBloxFrontend lbFrontend = null; if (_settings.createWorkspace() || _settings.getFacts() || _settings.getQuery() || _settings.getDataPlane() || _settings.revert()) { lbFrontend = connect(); } if (_settings.revert()) { revert(lbFrontend); return; } // Create new workspace (will 
overwrite existing) if requested if (_settings.createWorkspace()) { addProject(lbFrontend); String lbHostnamePath = _settings.getJobLogicBloxHostnamePath(); String lbHostname = _settings.getServiceLogicBloxHostname(); if (lbHostnamePath != null && lbHostname != null) { writeFile(lbHostnamePath, lbHostname); } if (!_settings.getFacts()) { return; } } // Post facts if requested if (_settings.getFacts()) { addStaticFacts(lbFrontend, BASIC_FACTS_BLOCKNAME); postFacts(lbFrontend, cpFactBins); return; } if (_settings.getQuery()) { lbFrontend.initEntityTable(); Map<String, String> allPredicateNames = _predicateInfo .getPredicateNames(); Set<String> predicateNames = new TreeSet<String>(); if (_settings.getQueryAll()) { predicateNames.addAll(allPredicateNames.keySet()); } else { predicateNames.addAll(_settings.getPredicates()); } if (_settings.getCountsOnly()) { printPredicateCounts(lbFrontend, predicateNames); } else { printPredicates(lbFrontend, predicateNames); } return; } if (_settings.getDataPlane()) { computeDataPlane(lbFrontend); return; } Map<String, StringBuilder> trafficFactBins = null; if (_settings.getFlows() || _settings.getDumpTrafficFacts()) { trafficFactBins = new LinkedHashMap<String, StringBuilder>(); initTrafficFactBins(trafficFactBins); writeTrafficFacts(trafficFactBins); if (_settings.getDumpTrafficFacts()) { dumpFacts(trafficFactBins); } if (_settings.getFlows()) { lbFrontend = connect(); postFacts(lbFrontend, trafficFactBins); return; } } throw new BatfishException( "No task performed! Run with -help flag to see usage"); } private void serializeIndependentConfigs( Map<String, Configuration> configurations, String outputPath) { _logger .info("\n*** SERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n"); resetTimer(); new File(outputPath).mkdirs(); for (String name : configurations.keySet()) { Configuration c = configurations.get(name); Path currentOutputPath = Paths.get(outputPath, name); _logger.info("Serializing: \"" + name + "\" ==> \"" + currentOutputPath.toString() + "\""); serializeObject(c, currentOutputPath.toFile()); _logger.debug(" ...OK\n"); } printElapsedTime(); } private void serializeIndependentConfigs(String vendorConfigPath, String outputPath) { Map<String, Configuration> configurations = getConfigurations(vendorConfigPath); serializeIndependentConfigs(configurations, outputPath); } private void serializeObject(Object object, File outputFile) { FileOutputStream fos; ObjectOutputStream oos; try { fos = new FileOutputStream(outputFile); if (_settings.getSerializeToText()) { XStream xstream = new XStream(new DomDriver("UTF-8")); oos = xstream.createObjectOutputStream(fos); } else { oos = new ObjectOutputStream(fos); } oos.writeObject(object); oos.close(); } catch (IOException e) { throw new BatfishException( "Failed to serialize object to output file: " + outputFile.toString(), e); } } private void serializeVendorConfigs(String testRigPath, String outputPath) { Map<File, String> configurationData = readConfigurationFiles(testRigPath); Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData); if (vendorConfigurations == null) { throw new BatfishException("Exiting due to parser errors\n"); } String nodeRolesPath = _settings.getNodeRolesPath(); if (nodeRolesPath != null) { NodeRoleMap nodeRoles = parseNodeRoles(testRigPath); for (Entry<String, RoleSet> nodeRolesEntry : nodeRoles.entrySet()) { String hostname = nodeRolesEntry.getKey(); VendorConfiguration config = vendorConfigurations.get(hostname); if (config == 
null) { throw new BatfishException( "role set assigned to non-existent node: \"" + hostname + "\""); } RoleSet roles = nodeRolesEntry.getValue(); config.setRoles(roles); } _logger.info("Serializing node-roles mappings: \"" + nodeRolesPath + "\"..."); serializeObject(nodeRoles, new File(nodeRolesPath)); _logger.info("OK\n"); } _logger.info("\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); resetTimer(); new File(outputPath).mkdirs(); for (String name : vendorConfigurations.keySet()) { VendorConfiguration vc = vendorConfigurations.get(name); Path currentOutputPath = Paths.get(outputPath, name); _logger.debug("Serializing: \"" + name + "\" ==> \"" + currentOutputPath.toString() + "\"..."); serializeObject(vc, currentOutputPath.toFile()); _logger.debug("OK\n"); } printElapsedTime(); } public void writeConfigurationFacts( Map<String, Configuration> configurations, Map<String, StringBuilder> factBins) { populateConfigurationFactBins(configurations.values(), factBins); } private void writeFile(String outputPath, String output) { File outputFile = new File(outputPath); try { FileUtils.write(outputFile, output); } catch (IOException e) { throw new BatfishException("Failed to write file: " + outputPath, e); } } private void writeFlowSinkFacts(FlowSinkSet flowSinks, Map<String, StringBuilder> cpFactBins) { StringBuilder sb = cpFactBins.get("SetFlowSinkInterface"); for (FlowSinkInterface f : flowSinks) { String node = f.getNode(); String iface = f.getInterface(); sb.append(node + "|" + iface + "\n"); } } public void writeTopologyFacts(String testRigPath, Map<String, Configuration> configurations, Map<String, StringBuilder> factBins) { Path topologyFilePath = Paths.get(testRigPath, TOPOLOGY_FILENAME); // Get generated facts from topology file if (Files.exists(topologyFilePath)) { processTopology(topologyFilePath.toFile(), factBins); } else { // tell logicblox to guess adjacencies based on interface // subnetworks _logger .info("*** (GUESSING TOPOLOGY IN ABSENCE OF EXPLICIT FILE) ***\n"); StringBuilder wGuessTopology = factBins.get("GuessTopology"); wGuessTopology.append("1\n"); } } private void writeTrafficFacts(Map<String, StringBuilder> factBins) { StringBuilder wSetFlowOriginate = factBins.get("SetFlowOriginate"); RoleNodeMap roleNodes = null; if (_settings.getRoleHeaders()) { String nodeRolesPath = _settings.getNodeRolesPath(); NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(new File( nodeRolesPath)); roleNodes = nodeRoles.toRoleNodeMap(); } parseFlowsFromConstraints(wSetFlowOriginate, roleNodes); if (_settings.duplicateRoleFlows()) { StringBuilder wDuplicateRoleFlows = factBins.get("DuplicateRoleFlows"); wDuplicateRoleFlows.append("1\n"); } } }
bug fix
projects/batfish/src/org/batfish/main/Batfish.java
bug fix
Java
bsd-3-clause
a5a8d92f902b0074ccc1b49865af7ca782cd5d0c
0
HearthStats/HearthStats.net-Uploader,HearthStats/HearthStats.net-Uploader
package net.hearthstats; import com.boxysystems.jgoogleanalytics.FocusPoint; import com.boxysystems.jgoogleanalytics.JGoogleAnalyticsTracker; import net.hearthstats.analysis.AnalyserEvent; import net.hearthstats.analysis.HearthstoneAnalyser; import net.hearthstats.log.Log; import net.hearthstats.log.LogPane; import net.hearthstats.notification.DialogNotificationQueue; import net.hearthstats.notification.NotificationQueue; import net.hearthstats.notification.OsxNotificationQueue; import net.hearthstats.state.Screen; import net.hearthstats.state.ScreenGroup; import net.hearthstats.ui.MatchEndPopup; import net.hearthstats.util.Rank; import net.miginfocom.swing.MigLayout; import org.json.simple.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.HyperlinkListener; import java.awt.*; import java.awt.event.*; import java.awt.image.BufferedImage; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.text.MessageFormat; import java.util.*; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; @SuppressWarnings("serial") public class Monitor extends JFrame implements Observer, WindowListener { private static final String PROFILES_URL = "http://hearthstats.net/profiles"; private static final String DECKS_URL = "http://hearthstats.net/decks"; private static final int POLLING_INTERVAL_IN_MS = 100; private static final int MAX_THREADS = 5; private static final int GC_FREQUENCY = 20; private static final EnumSet<Screen> DO_NOT_NOTIFY_SCREENS = EnumSet.of(Screen.COLLECTION, Screen.COLLECTION_ZOOM, Screen.MAIN_TODAYSQUESTS, Screen.TITLE); private static Logger debugLog = LoggerFactory.getLogger(Monitor.class); private static Logger perfLog = LoggerFactory.getLogger("net.hearthstats.performance"); public static final String[] hsClassOptions = { "- undetected -", "Druid", "Hunter", "Mage", "Paladin", "Priest", "Rogue", "Shaman", "Warlock", "Warrior" }; protected API _api = new API(); protected HearthstoneAnalyser _analyzer = new HearthstoneAnalyser(); protected ProgramHelper _hsHelper; private HyperlinkListener _hyperLinkListener = HyperLinkHandler.getInstance(); private JTextField _currentOpponentNameField; private JLabel _currentMatchLabel; private JCheckBox _currentGameCoinField; private JTextArea _currentNotesField; private JButton _lastMatchButton; private HearthstoneMatch _lastMatch; private JComboBox<String> _deckSlot1Field; private JComboBox<String> _deckSlot2Field; private JComboBox<String> _deckSlot3Field; private JComboBox<String> _deckSlot4Field; private JComboBox<String> _deckSlot5Field; private JComboBox<String> _deckSlot6Field; private JComboBox<String> _deckSlot7Field; private JComboBox<String> _deckSlot8Field; private JComboBox<String> _deckSlot9Field; private JComboBox _currentOpponentClassSelect; private JComboBox _currentYourClassSelector; private int _numThreads = 0; private int _pollIterations = 0; protected boolean _hearthstoneDetected; protected JGoogleAnalyticsTracker _analytics; protected LogPane _logText; private JScrollPane _logScroll; private JTextField _userKeyField; private JCheckBox _checkUpdatesField; private JCheckBox _notificationsEnabledField; private JComboBox _notificationsFormat; private 
JCheckBox _showHsFoundField; private JCheckBox _showHsClosedField; private JCheckBox _showScreenNotificationField; private JCheckBox _showModeNotificationField; private JCheckBox _showDeckNotificationField; private JComboBox showMatchPopupField; private JCheckBox _analyticsField; private JCheckBox _minToTrayField; private JCheckBox _startMinimizedField; private JCheckBox _showYourTurnNotificationField; private JTabbedPane _tabbedPane; private ResourceBundle _bundle = ResourceBundle.getBundle("net.hearthstats.resources.Main"); public Monitor() throws HeadlessException { switch (Config.os) { case WINDOWS: _hsHelper = new ProgramHelperWindows("Hearthstone.exe"); break; case OSX: _hsHelper = new ProgramHelperOsx("unity.Blizzard Entertainment.Hearthstone"); break; default: throw new UnsupportedOperationException(t("error.os_unsupported")); } } /** * Loads text from the main resource bundle, using the local language when available. * @param key the key for the desired string * @return The requested string */ private String t(String key) { return _bundle.getString(key); } /** * Loads text from the main resource bundle, using the local language when available, and puts the given value into the appropriate spot. * @param key the key for the desired string * @param value0 a value to place in the {0} placeholder in the string * @return The requested string */ private String t(String key, String value0) { String message = _bundle.getString(key); return MessageFormat.format(message, value0); } public void start() throws IOException { if (Config.analyticsEnabled()) { debugLog.debug("Enabling analytics"); _analytics = new JGoogleAnalyticsTracker("HearthStats.net " + t("Uploader"), Config.getVersionWithOs(), "UA-45442103-3"); _analytics.trackAsynchronously(new FocusPoint("AppStart")); } addWindowListener(this); _createAndShowGui(); _showWelcomeLog(); _checkForUpdates(); _api.addObserver(this); _analyzer.addObserver(this); _hsHelper.addObserver(this); if(_checkForUserKey()) { _pollHearthstone(); } if (Config.os == Config.OS.OSX) { Log.info(t("waiting_for_hs")); } else { Log.info(t("waiting_for_hs_windowed")); } } private void _showWelcomeLog() { debugLog.debug("Showing welcome log messages"); Log.welcome("HearthStats.net " + t("Uploader") + " v" + Config.getVersionWithOs()); Log.help(t("welcome_1_set_decks")); if (Config.os == Config.OS.OSX) { Log.help(t("welcome_2_run_hearthstone")); Log.help(t("welcome_3_notifications")); } else { Log.help(t("welcome_2_run_hearthstone_windowed")); Log.help(t("welcome_3_notifications_windowed")); } String logFileLocation = Log.getLogFileLocation(); if (logFileLocation == null) { Log.help(t("welcome_4_feedback")); } else { Log.help(t("welcome_4_feedback_with_log", logFileLocation)); } } private boolean _checkForUserKey() { if(Config.getUserKey().equals("your_userkey_here")) { Log.warn(t("error.userkey_not_entered")); bringWindowToFront(); JOptionPane.showMessageDialog(this, "HearthStats.net " + t("error.title") + ":\n\n" + t("you_need_to_enter_userkey") + "\n\n" + t("get_it_at_hsnet_profiles")); // Create Desktop object Desktop d = Desktop.getDesktop(); // Browse a URL, say google.com try { d.browse(new URI(PROFILES_URL)); } catch (IOException | URISyntaxException e) { Log.warn("Error launching browser with URL " + PROFILES_URL, e); } String[] options = {t("button.ok"), t("button.cancel")}; JPanel panel = new JPanel(); JLabel lbl = new JLabel(t("UserKey")); JTextField txt = new JTextField(10); panel.add(lbl); panel.add(txt); int selectedOption = 
JOptionPane.showOptionDialog(this, panel, t("enter_your_userkey"), JOptionPane.NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if(selectedOption == 0) { String userkey = txt.getText(); if(userkey.isEmpty()) { _checkForUserKey(); } else { Config.setUserKey(userkey); try { Config.save(); } catch (Throwable e) { Log.warn("Error occurred trying to write settings file, your settings may not be saved", e); } _userKeyField.setText(userkey); Log.info(t("UserkeyStored")); _pollHearthstone(); } } else { System.exit(0); } return false; } return true; } /** * Brings the monitor window to the front of other windows. Should only be used for important events like a * modal dialog or error that we want the user to see immediately. */ private void bringWindowToFront() { final Monitor frame = this; java.awt.EventQueue.invokeLater(new Runnable() { @Override public void run() { frame.setVisible(true); } }); } /** * Overridden version of setVisible based on http://stackoverflow.com/questions/309023/how-to-bring-a-window-to-the-front * that should ensure the window is brought to the front for important things like modal dialogs. */ @Override public void setVisible(final boolean visible) { // let's handle visibility... if (!visible || !isVisible()) { // have to check this condition simply because super.setVisible(true) invokes toFront if frame was already visible super.setVisible(visible); } // ...and bring frame to the front.. in a strange and weird way if (visible) { int state = super.getExtendedState(); state &= ~JFrame.ICONIFIED; super.setExtendedState(state); super.setAlwaysOnTop(true); super.toFront(); super.requestFocus(); super.setAlwaysOnTop(false); } } @Override public void toFront() { super.setVisible(true); int state = super.getExtendedState(); state &= ~JFrame.ICONIFIED; super.setExtendedState(state); super.setAlwaysOnTop(true); super.toFront(); super.requestFocus(); super.setAlwaysOnTop(false); } private void _createAndShowGui() { debugLog.debug("Creating GUI"); Image icon = new ImageIcon(getClass().getResource("/images/icon.png")).getImage(); setIconImage(icon); setLocation(Config.getX(), Config.getY()); setSize(Config.getWidth(), Config.getHeight()); _tabbedPane = new JTabbedPane(); add(_tabbedPane); // log _logText = new LogPane(); _logScroll = new JScrollPane (_logText, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); _tabbedPane.add(_logScroll, t("tab.log")); _tabbedPane.add(_createMatchUi(), t("tab.current_match")); _tabbedPane.add(_createDecksUi(), t("tab.decks")); _tabbedPane.add(_createOptionsUi(), t("tab.options")); _tabbedPane.add(_createAboutUi(), t("tab.about")); _tabbedPane.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { if (_tabbedPane.getSelectedIndex() == 2) try { _updateDecksTab(); } catch (IOException e1) { _notify(t("error.loading_decks.title"), t("error.loading_decks")); Log.warn(t("error.loading_decks"), e1); } } }); _updateCurrentMatchUi(); _enableMinimizeToTray(); setMinimumSize(new Dimension(500, 600)); setVisible(true); if(Config.startMinimized()) setState(JFrame.ICONIFIED); _updateTitle(); } private JScrollPane _createAboutUi() { JPanel panel = new JPanel(); panel.setMaximumSize(new Dimension(100,100)); panel.setBackground(Color.WHITE); MigLayout layout = new MigLayout(""); panel.setLayout(layout); JEditorPane text = new JEditorPane(); text.setContentType("text/html"); text.setEditable(false); text.setBackground(Color.WHITE); text.setText("<html><body style=\"font-family:'Helvetica 
Neue', Helvetica, Arial, sans-serif; font-size:10px;\">" + "<h2 style=\"font-weight:normal\"><a href=\"http://hearthstats.net\">HearthStats.net</a> " + t("Uploader") + " v" + Config.getVersion() + "</h2>" + "<p><strong>" + t("Author") + ":</strong> " + "Jerome Dane (<a href=\"https://plus.google.com/+JeromeDane\">Google+</a>, <a href=\"http://twitter.com/JeromeDane\">Twitter</a>), " + "Charles Gutjahr (<a href=\"http://charlesgutjahr.com\">Website</a>)</p>" + "<p>" + t("about.utility_l1") + "<br>" + t("about.utility_l2") + "<br>" + t("about.utility_l3") + "</p>" + "<p>" + t("about.open_source_l1") + "<br>" + t("about.open_source_l2") + "</p>" + "<p>&bull; <a href=\"https://github.com/HearthStats/HearthStats.net-Uploader/\">" + t("about.project_source") + "</a><br/>" + "&bull; <a href=\"https://github.com/HearthStats/HearthStats.net-Uploader/releases\">" + t("about.releases_and_changelog") + "</a><br/>" + "&bull; <a href=\"https://github.com/HearthStats/HearthStats.net-Uploader/issues\">" + t("about.feedback_and_suggestions") + "</a><br/>" + "&bull; <a href=\"http://redd.it/1wa4rc/\">Reddit thread</a> (please up-vote)</p>" + "<p><strong>" + t("about.support_project") + ":</strong></p>" + "</body></html>" ); text.addHyperlinkListener(_hyperLinkListener); panel.add(text, "wrap"); JButton donateButton = new JButton("<html><img style=\"border-style: none;\" src=\"" + getClass().getResource("/images/donate.gif") + "\"/></html>"); donateButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // Create Desktop object Desktop d = Desktop.getDesktop(); // Browse to the donation URL try { d.browse(new URI("https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=UJFTUHZF6WPDS")); } catch (Throwable e1) { Main.showErrorDialog("Error launching browser with donation URL", e1); } } }); donateButton.setCursor(new Cursor(Cursor.HAND_CURSOR)); panel.add(donateButton, "wrap"); JEditorPane contributorsText = new JEditorPane(); contributorsText.setContentType("text/html"); contributorsText.setEditable(false); contributorsText.setBackground(Color.WHITE); contributorsText.setText("<html><body style=\"font-family:arial,sans-serif; font-size:10px;\">" + "<p><strong>Contributors</strong> (listed alphabetically):</p>" + "<p>" + "&bull; <a href=\"https://github.com/gtch\">Charles Gutjahr</a> - OS X version and new screen detection<br>" + "&bull; <a href=\"https://github.com/jcrka\">jcrka</a> - Russian translation<br>" + "&bull; <a href=\"https://github.com/JeromeDane\">Jerome Dane</a> - Original developer<br>" + "&bull; <a href=\"https://github.com/sargonas\">J Eckert</a> - Fixed notifications spawning taskbar icons<br>" + "&bull; <a href=\"https://github.com/nwalsh1995\">nwalsh1995</a> - Started turn detection development<br>" + "&bull; <a href=\"https://github.com/remcoros\">Remco Ros</a> (<a href=\"http://hearthstonetracker.com/\">HearthstoneTracker</a>) - Provides advice &amp; suggestions<br>" + "&bull; <a href=\"https://github.com/RoiyS\">RoiyS</a> - Added option to disable all notifications<br>" + "</p>"+ "</body></html>" ); contributorsText.addHyperlinkListener(_hyperLinkListener); panel.add(contributorsText, "wrap"); return new JScrollPane(panel, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); } private JPanel _createMatchUi() { JPanel panel = new JPanel(); MigLayout layout = new MigLayout(); panel.setLayout(layout); // match label panel.add(new JLabel(" "), "wrap"); _currentMatchLabel = new JLabel(); 
panel.add(_currentMatchLabel, "skip,span,wrap"); panel.add(new JLabel(" "), "wrap"); String[] localizedClassOptions = new String[hsClassOptions.length]; localizedClassOptions[0] = "- " + t("undetected") + " -"; for(int i = 1; i < localizedClassOptions.length; i++) localizedClassOptions[i] = t(hsClassOptions[i]); // your class panel.add(new JLabel(t("match.label.your_class") + " "), "skip,right"); _currentYourClassSelector = new JComboBox<>(localizedClassOptions); panel.add(_currentYourClassSelector, "wrap"); // opponent class panel.add(new JLabel(t("match.label.opponents_class") + " "), "skip,right"); _currentOpponentClassSelect = new JComboBox<>(localizedClassOptions); panel.add(_currentOpponentClassSelect, "wrap"); // Opponent name panel.add(new JLabel("Opponent's Name: "), "skip,right"); _currentOpponentNameField = new JTextField(); _currentOpponentNameField.setMinimumSize(new Dimension(100, 1)); _currentOpponentNameField.addKeyListener(new KeyAdapter() { public void keyReleased(KeyEvent e) { _analyzer.getMatch().setOpponentName(_currentOpponentNameField.getText().replaceAll("(\r\n|\n)", "<br/>")); } }); panel.add(_currentOpponentNameField, "wrap"); // coin (defaults to unselected until detected in-game) panel.add(new JLabel(t("match.label.coin") + " "), "skip,right"); _currentGameCoinField = new JCheckBox(t("match.coin")); _currentGameCoinField.setSelected(false); _currentGameCoinField.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { _analyzer.getMatch().setCoin(_currentGameCoinField.isSelected()); } }); panel.add(_currentGameCoinField, "wrap"); // notes panel.add(new JLabel(t("match.label.notes") + " "), "skip,wrap"); _currentNotesField = new JTextArea(); _currentNotesField.setBorder(BorderFactory.createCompoundBorder( BorderFactory.createMatteBorder(1, 1, 1, 1, Color.black), BorderFactory.createEmptyBorder(3, 6, 3, 6))); _currentNotesField.setMinimumSize(new Dimension(350, 150)); _currentNotesField.setBackground(Color.WHITE); _currentNotesField.addKeyListener(new KeyAdapter() { public void keyReleased(KeyEvent e) { _analyzer.getMatch().setNotes(_currentNotesField.getText()); } }); panel.add(_currentNotesField, "skip,span"); panel.add(new JLabel(" "), "wrap"); // last match panel.add(new JLabel(t("match.label.previous_match") + " "), "skip,wrap"); _lastMatchButton = new JButton("[n/a]"); _lastMatchButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent arg0) { String url = "Arena".equals(_lastMatch.getMode()) ? 
"http://hearthstats.net/arenas/new" : _lastMatch.getEditUrl(); try { Desktop.getDesktop().browse(new URI(url)); } catch (Throwable e) { Main.showErrorDialog("Error launching browser with URL " + url, e); } } }); _lastMatchButton.setEnabled(false); panel.add(_lastMatchButton, "skip,wrap,span"); return panel; } private JPanel _createDecksUi() { JPanel panel = new JPanel(); MigLayout layout = new MigLayout(); panel.setLayout(layout); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("set_your_deck_slots")), "skip,span,wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("deck_slot.label_1")), "skip"); panel.add(new JLabel(t("deck_slot.label_2")), ""); panel.add(new JLabel(t("deck_slot.label_3")), "wrap"); _deckSlot1Field = new JComboBox<>(); panel.add(_deckSlot1Field, "skip"); _deckSlot2Field = new JComboBox<>(); panel.add(_deckSlot2Field, ""); _deckSlot3Field = new JComboBox<>(); panel.add(_deckSlot3Field, "wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("deck_slot.label_4")), "skip"); panel.add(new JLabel(t("deck_slot.label_5")), ""); panel.add(new JLabel(t("deck_slot.label_6")), "wrap"); _deckSlot4Field = new JComboBox<>(); panel.add(_deckSlot4Field, "skip"); _deckSlot5Field = new JComboBox<>(); panel.add(_deckSlot5Field, ""); _deckSlot6Field = new JComboBox<>(); panel.add(_deckSlot6Field, "wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("deck_slot.label_7")), "skip"); panel.add(new JLabel(t("deck_slot.label_8")), ""); panel.add(new JLabel(t("deck_slot.label_9")), "wrap"); _deckSlot7Field = new JComboBox<>(); panel.add(_deckSlot7Field, "skip"); _deckSlot8Field = new JComboBox<>(); panel.add(_deckSlot8Field, ""); _deckSlot9Field = new JComboBox<>(); panel.add(_deckSlot9Field, "wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(" "), "wrap"); JButton saveButton = new JButton(t("button.save_deck_slots")); saveButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { _saveDeckSlots(); } }); panel.add(saveButton, "skip"); JButton refreshButton = new JButton(t("button.refresh")); refreshButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { try { _updateDecksTab(); } catch (IOException e1) { Main.showErrorDialog("Error updating decks", e1); } } }); panel.add(refreshButton, "wrap,span"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(" "), "wrap"); JButton myDecksButton = new JButton(t("manage_decks_on_hsnet")); myDecksButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { try { Desktop.getDesktop().browse(new URI(DECKS_URL)); } catch (Throwable e1) { Main.showErrorDialog("Error launching browser with URL" + DECKS_URL, e1); } } }); panel.add(myDecksButton, "skip,span"); return panel; } private JPanel _createOptionsUi() { JPanel panel = new JPanel(); MigLayout layout = new MigLayout(); panel.setLayout(layout); panel.add(new JLabel(" "), "wrap"); // user key panel.add(new JLabel(t("options.label.userkey") + " "), "skip,right"); _userKeyField = new JTextField(); _userKeyField.setText(Config.getUserKey()); panel.add(_userKeyField, "wrap"); // check for updates panel.add(new JLabel(t("options.label.updates") + " "), "skip,right"); _checkUpdatesField = new JCheckBox(t("options.check_updates")); _checkUpdatesField.setSelected(Config.checkForUpdates()); panel.add(_checkUpdatesField, "wrap"); // show notifications panel.add(new JLabel(t("options.label.notifications") + " "), 
"skip,right"); _notificationsEnabledField = new JCheckBox("Show notifications"); _notificationsEnabledField.setSelected(Config.showNotifications()); _notificationsEnabledField.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { _updateNotificationCheckboxes(); } }); panel.add(_notificationsEnabledField, "wrap"); // When running on Mac OS X 10.8 or later, the format of the notifications can be changed if (Config.isOsxNotificationsSupported()) { panel.add(new JLabel(""), "skip,right"); JLabel notificationsFormatLabel = new JLabel(t("options.label.notifyformat.label")); panel.add(notificationsFormatLabel, "split 2, gapleft 27"); _notificationsFormat = new JComboBox<>(new String[]{ t("options.label.notifyformat.osx"), t("options.label.notifyformat.hearthstats")}); _notificationsFormat.setSelectedIndex(Config.useOsxNotifications() ? 0 : 1); panel.add(_notificationsFormat, "wrap"); } // show HS found notification panel.add(new JLabel(""), "skip,right"); _showHsFoundField = new JCheckBox(t("options.notification.hs_found")); _showHsFoundField.setSelected(Config.showHsFoundNotification()); panel.add(_showHsFoundField, "wrap"); // show HS closed notification panel.add(new JLabel(""), "skip,right"); _showHsClosedField = new JCheckBox(t("options.notification.hs_closed")); _showHsClosedField.setSelected(Config.showHsClosedNotification()); panel.add(_showHsClosedField, "wrap"); // show game screen notification panel.add(new JLabel(""), "skip,right"); _showScreenNotificationField = new JCheckBox(t("options.notification.screen")); _showScreenNotificationField.setSelected(Config.showScreenNotification()); panel.add(_showScreenNotificationField, "wrap"); // show game mode notification panel.add(new JLabel(""), "skip,right"); _showModeNotificationField = new JCheckBox(t("options.notification.mode")); _showModeNotificationField.setSelected(Config.showModeNotification()); panel.add(_showModeNotificationField, "wrap"); // show deck notification panel.add(new JLabel(""), "skip,right"); _showDeckNotificationField = new JCheckBox(t("options.notification.deck")); _showDeckNotificationField.setSelected(Config.showDeckNotification()); panel.add(_showDeckNotificationField, "wrap"); // show your turn notification panel.add(new JLabel(""), "skip,right"); _showYourTurnNotificationField = new JCheckBox(t("options.notification.turn")); _showYourTurnNotificationField.setSelected(Config.showYourTurnNotification()); panel.add(_showYourTurnNotificationField, "wrap"); _updateNotificationCheckboxes(); panel.add(new JLabel(t("options.label.matchpopup")), "skip,right"); showMatchPopupField = new JComboBox<>(new String[]{ t("options.label.matchpopup.always"), t("options.label.matchpopup.incomplete"), t("options.label.matchpopup.never")}); showMatchPopupField.setSelectedIndex(Config.showMatchPopup().ordinal()); panel.add(showMatchPopupField, "wrap"); // minimize to tray panel.add(new JLabel("Interface: "), "skip,right"); _minToTrayField = new JCheckBox(t("options.notification.min_to_tray")); _minToTrayField.setSelected(Config.checkForUpdates()); panel.add(_minToTrayField, "wrap"); // start minimized panel.add(new JLabel(""), "skip,right"); _startMinimizedField = new JCheckBox(t("options.notification.start_min")); _startMinimizedField.setSelected(Config.startMinimized()); panel.add(_startMinimizedField, "wrap"); // analytics panel.add(new JLabel("Analytics: "), "skip,right"); _analyticsField = new JCheckBox(t("options.submit_stats")); final Monitor frame = this; 
_analyticsField.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if(!_analyticsField.isSelected()) { int dialogResult = JOptionPane.showConfirmDialog(frame, "A lot of work has gone into this uploader.\n" + "It is provided for free, and all we ask in return\n" + "is that you let us track basic, anonymous statistics\n" + "about how frequently it is being used." + "\n\nAre you sure you want to disable analytics?", "Please reconsider ...", JOptionPane.YES_NO_OPTION); if (dialogResult == JOptionPane.NO_OPTION){ _analyticsField.setSelected(true); } } } }); _analyticsField.setSelected(Config.analyticsEnabled()); panel.add(_analyticsField, "wrap");
// Save button
panel.add(new JLabel(""), "skip,right"); JButton saveOptionsButton = new JButton(t("button.save_options")); saveOptionsButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { _saveOptions(); } }); panel.add(saveOptionsButton, "wrap"); return panel; }
private void _updateNotificationCheckboxes() { boolean isEnabled = _notificationsEnabledField.isSelected(); if (_notificationsFormat != null) { _notificationsFormat.setEnabled(isEnabled); } _showHsFoundField.setEnabled(isEnabled); _showHsClosedField.setEnabled(isEnabled); _showScreenNotificationField.setEnabled(isEnabled); _showModeNotificationField.setEnabled(isEnabled); _showDeckNotificationField.setEnabled(isEnabled);
// keep the your-turn checkbox in step with the master switch; it was previously missing here, so it stayed enabled while every other notification option was greyed out
_showYourTurnNotificationField.setEnabled(isEnabled); }
// Selector items are encoded as "<deck name> #<id>" so that _getDeckSlotDeckId() can later recover the
// numeric id by matching the trailing digits (regex "[^0-9]+([0-9]+)$"); keep the "#id" suffix if this
// item format is ever changed.
private void _applyDecksToSelector(JComboBox<String> selector, Integer slotNum) { selector.setMaximumSize(new Dimension(145, selector.getSize().height)); selector.removeAllItems(); selector.addItem("- Select a deck -"); List<JSONObject> decks = DeckSlotUtils.getDecks(); for(int i = 0; i < decks.size(); i++) { selector.addItem(decks.get(i).get("name") + " #" + decks.get(i).get("id")); if(decks.get(i).get("slot") != null && decks.get(i).get("slot").toString().equals(slotNum.toString())) selector.setSelectedIndex(i + 1); } }
private void _updateDecksTab() throws IOException { DeckSlotUtils.updateDecks(); _applyDecksToSelector(_deckSlot1Field, 1); _applyDecksToSelector(_deckSlot2Field, 2); _applyDecksToSelector(_deckSlot3Field, 3); _applyDecksToSelector(_deckSlot4Field, 4); _applyDecksToSelector(_deckSlot5Field, 5); _applyDecksToSelector(_deckSlot6Field, 6); _applyDecksToSelector(_deckSlot7Field, 7); _applyDecksToSelector(_deckSlot8Field, 8); _applyDecksToSelector(_deckSlot9Field, 9); }
private void _checkForUpdates() { if(Config.checkForUpdates()) { Log.info(t("checking_for_updates...")); try { String availableVersion = Updater.getAvailableVersion(); if (availableVersion != null) { Log.info(t("latest_v_available") + " " + availableVersion);
// exact string comparison; matches() would treat the version as a regex, so the dots in "x.y.z" could match arbitrary characters
if (!availableVersion.equals(Config.getVersion())) { bringWindowToFront(); int dialogButton = JOptionPane.YES_NO_OPTION; int dialogResult = JOptionPane.showConfirmDialog(this, "A new version of this uploader is available\n\n" + Updater.getRecentChanges() + "\n\n" + t("would_u_like_to_install_update"), "HearthStats.net " + t("uploader_updates_avail"), dialogButton); if (dialogResult == JOptionPane.YES_OPTION){ Updater.run(); } else { dialogResult = JOptionPane.showConfirmDialog(null, t("would_you_like_to_disable_updates"), t("disable_update_checking"), dialogButton); if(dialogResult == JOptionPane.YES_OPTION){ String[] options = { t("button.ok") };
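// Build a small confirmation panel reminding the user that update checking can be re-enabled at any
// time from this options tab before the preference is switched off below.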
JPanel panel = new JPanel(); JLabel lbl = new JLabel(t("reenable_updates_any_time")); panel.add(lbl); JOptionPane.showOptionDialog(this, panel, t("updates_disabled_msg"), JOptionPane.NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); Config.setCheckForUpdates(false); } } } } else { Log.warn("Unable to determine latest available version"); } } catch (Throwable e) { debugLog.error("Update check failed", e); _notify("Update Checking Error", "Unable to determine the latest available version"); } } }
protected ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(MAX_THREADS); protected boolean _drawPaneAdded = false; protected BufferedImage image; protected JPanel _drawPane = new JPanel() { @Override protected void paintComponent(Graphics g) { super.paintComponent(g); g.drawImage(image, 0, 0, null); } }; protected NotificationQueue _notificationQueue = Config.useOsxNotifications() ? new OsxNotificationQueue() : new DialogNotificationQueue(); private Boolean _currentMatchEnabled = false; private boolean _playingInMatch = false;
protected void _notify(String header) { _notify(header, ""); }
protected void _notify(String header, String message) { if (!Config.showNotifications()) return; //Notifications disabled
_notificationQueue.add(header, message, false); }
protected void _updateTitle() { String title = "HearthStats.net Uploader"; if (_hearthstoneDetected) { if (_analyzer.getScreen() != null) { title += " - " + _analyzer.getScreen().title; if (_analyzer.getScreen() == Screen.PLAY_LOBBY && _analyzer.getMode() != null) { title += " " + _analyzer.getMode(); } if (_analyzer.getScreen() == Screen.FINDING_OPPONENT) { if (_analyzer.getMode() != null) { title += " for " + _analyzer.getMode() + " Game"; } }
// TODO: replace with enum values
if ("Match Start".equals(_analyzer.getScreen().title) || "Playing".equals(_analyzer.getScreen().title)) { title += " " + (_analyzer.getMode() == null ? "[undetected]" : _analyzer.getMode()); title += " " + (_analyzer.getCoin() ? "" : "No ") + "Coin"; title += " " + (_analyzer.getYourClass() == null ? "[undetected]" : _analyzer.getYourClass()); title += " VS. " + (_analyzer.getOpponentClass() == null ? "[undetected]" : _analyzer.getOpponentClass()); } } } else { title += " - Waiting for Hearthstone "; } setTitle(title); }
private int _getClassOptionIndex(String cName) { for (int i = 0; i < hsClassOptions.length; i++) { if (hsClassOptions[i].equals(cName)) { return i; } } return 0; }
private void _updateCurrentMatchUi() { HearthstoneMatch match = _analyzer.getMatch(); _updateMatchClassSelectorsIfSet(match); if(_currentMatchEnabled) _currentMatchLabel.setText(match.getMode() + " Match - Turn " + match.getNumTurns()); else _currentMatchLabel.setText("Waiting for next match to start ..."); _currentOpponentNameField.setText(match.getOpponentName()); _currentOpponentClassSelect.setSelectedIndex(_getClassOptionIndex(match.getOpponentClass())); _currentYourClassSelector.setSelectedIndex(_getClassOptionIndex(match.getUserClass())); _currentGameCoinField.setSelected(match.hasCoin()); _currentNotesField.setText(match.getNotes());
// last match
if(_lastMatch != null && _lastMatch.getMode() != null) { if(_lastMatch.getResult() != null) { String tooltip = (_lastMatch.getMode().equals("Arena") ? "View current arena run" : "Edit the previous match") + " on HearthStats.net"; _lastMatchButton.setToolTipText(tooltip); _lastMatchButton.setText(_lastMatch.toString()); _lastMatchButton.setEnabled(true); } } }
private void _updateImageFrame() { if (!_drawPaneAdded) { add(_drawPane); _drawPaneAdded = true; // remember that the pane has been added so it is only added once, not on every captured frame
} if (image.getWidth() >= 1024) { setSize(image.getWidth(), image.getHeight()); } _drawPane.repaint(); invalidate(); validate(); repaint(); }
private void _submitMatchResult(HearthstoneMatch hsMatch) throws IOException {
// check for new arena run
if ("Arena".equals(hsMatch.getMode()) && _analyzer.isNewArena()) { ArenaRun run = new ArenaRun(); run.setUserClass(hsMatch.getUserClass()); Log.info("Creating new " + run.getUserClass() + " arena run"); _notify("Creating new " + run.getUserClass() + " arena run"); _api.createArenaRun(run); _analyzer.setIsNewArena(false); } String header = "Submitting match result"; String message = hsMatch.toString(); _notify(header, message); Log.matchResult(header + ": " + message); if(Config.analyticsEnabled()) { _analytics.trackAsynchronously(new FocusPoint("Submit" + hsMatch.getMode() + "Match")); } _api.createMatch(hsMatch); }
private void _resetMatchClassSelectors() { _currentYourClassSelector.setSelectedIndex(0); _currentOpponentClassSelect.setSelectedIndex(0); }
private void _updateMatchClassSelectorsIfSet(HearthstoneMatch hsMatch) { if (_currentYourClassSelector.getSelectedIndex() > 0) { hsMatch.setUserClass(hsClassOptions[_currentYourClassSelector.getSelectedIndex()]); } if (_currentOpponentClassSelect.getSelectedIndex() > 0) { hsMatch.setOpponentClass(hsClassOptions[_currentOpponentClassSelect.getSelectedIndex()]); } }
protected void _handleHearthstoneFound(int currentPollIteration) { debugLog.debug(" - Iteration {} found Hearthstone", currentPollIteration);
// mark hearthstone found if necessary
if (!_hearthstoneDetected) { _hearthstoneDetected = true; debugLog.debug(" - Iteration {} changed hearthstoneDetected to true", currentPollIteration); if (Config.showHsFoundNotification()) { _notify("Hearthstone found"); } }
// grab the image from Hearthstone
debugLog.debug(" - Iteration {} screen capture", currentPollIteration); image = _hsHelper.getScreenCapture(); if (image == null) { debugLog.debug(" - Iteration {} screen capture returned null", currentPollIteration); } else {
// detect image stats
if (image.getWidth() >= 1024) { debugLog.debug(" - Iteration {} analysing image", currentPollIteration); _analyzer.analyze(image); } if (Config.mirrorGameImage()) { debugLog.debug(" - Iteration {} mirroring image", currentPollIteration); _updateImageFrame(); } } }
protected void _handleHearthstoneNotFound(int currentPollIteration) {
// mark hearthstone not found if necessary
if (_hearthstoneDetected) { _hearthstoneDetected = false; debugLog.debug(" - Iteration {} changed hearthstoneDetected to false", currentPollIteration); if (Config.showHsClosedNotification()) { _notify("Hearthstone closed"); _analyzer.reset(); } } }
protected void _pollHearthstone() { scheduledExecutorService.schedule(new Callable<Object>() { public Object call() throws Exception { _numThreads++; _pollIterations++;
// take a local copy of the iteration counter so log lines from this poll stay consistent even after later polls increment _pollIterations
int currentPollIteration = _pollIterations; try { debugLog.debug("--> Iteration {} started", currentPollIteration); if (_hsHelper.foundProgram()) { _handleHearthstoneFound(currentPollIteration); } else { debugLog.debug(" - Iteration {} did not find Hearthstone", currentPollIteration); _handleHearthstoneNotFound(currentPollIteration); }
_updateTitle(); _pollHearthstone(); // repeat the process
// Keep memory usage down by suggesting a garbage collection every GC_FREQUENCY polls (every two seconds at the 100ms polling interval) once the heap has grown past roughly 150MB
if (_pollIterations % GC_FREQUENCY == 0 && Runtime.getRuntime().totalMemory() > 150000000) { debugLog.debug(" - Iteration {} triggers GC", currentPollIteration); System.gc(); } } catch (Throwable ex) { debugLog.error(" - Iteration " + currentPollIteration + " caused exception which is being ignored:", ex); } finally { _numThreads--; // decrement in the finally block so an exception cannot leak the thread counter
debugLog.debug("<-- Iteration {} finished", currentPollIteration); } return ""; } }, POLLING_INTERVAL_IN_MS, TimeUnit.MILLISECONDS); }
/**
 * Checks whether the match result is complete, showing a popup if necessary to fix the match data,
 * and then submits the match when ready.
 *
 * @param match The match to check and submit.
 */
private void checkMatchResult(final HearthstoneMatch match) { _updateMatchClassSelectorsIfSet(match); final Config.MatchPopup matchPopup = Config.showMatchPopup(); final boolean showPopup; switch (matchPopup) { case ALWAYS: showPopup = true; break; case INCOMPLETE: showPopup = !match.isDataComplete(); break; case NEVER: showPopup = false; break; default: throw new UnsupportedOperationException("Unknown config option " + Config.showMatchPopup()); } if (showPopup) {
// Show a popup allowing the user to edit their match before submitting
final Monitor monitor = this; SwingUtilities.invokeLater(new Runnable() { @Override public void run() { try { boolean matchHasValidationErrors = !match.isDataComplete(); String infoMessage = null; do { if (infoMessage == null) { infoMessage = (matchPopup == Config.MatchPopup.INCOMPLETE) ? "Some match information couldn't be detected.<br>Please update these details then click Submit to submit the match to HearthStats:" : "The end of the match has been detected.<br>Please check these details then submit the match to HearthStats:"; } bringWindowToFront(); MatchEndPopup.Button buttonPressed = MatchEndPopup.showPopup(monitor, match, infoMessage); matchHasValidationErrors = !match.isDataComplete(); switch (buttonPressed) { case SUBMIT: if (matchHasValidationErrors) { infoMessage = "Some match information is incomplete.<br>Please update these details then click Submit to submit the match to HearthStats:"; } else { _submitMatchResult(match); } break; case CANCEL: return; } } while (matchHasValidationErrors); } catch (IOException e) { Main.showErrorDialog("Error submitting match result", e); } } }); } else {
// Don't show a popup, submit the match directly
try { _submitMatchResult(match); } catch (IOException e) { Main.showErrorDialog("Error submitting match result", e); } } }
private void handleAnalyserEvent(AnalyserEvent changed) throws IOException { switch(changed) { case ARENA_END: _notify("End of Arena Run Detected"); Log.info("End of Arena Run Detected"); _api.endCurrentArenaRun(); break; case COIN: _notify("Coin Detected"); Log.info("Coin Detected"); break; case DECK_SLOT: JSONObject deck = DeckSlotUtils.getDeckFromSlot(_analyzer.getDeckSlot()); if (deck == null) { _tabbedPane.setSelectedIndex(2); bringWindowToFront(); Main.showMessageDialog(this, "Unable to determine what deck you have in slot #" + _analyzer.getDeckSlot() + "\n\nPlease set your decks in the \"Decks\" tab."); } else { _notify("Deck Detected", deck.get("name").toString()); Log.info("Deck Detected: " + deck.get("name")); } break; case MODE: _playingInMatch = false; _setCurrentMatchEnabledi(false); if (Config.showModeNotification()) {
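// Ranked is the only mode that carries extra context: the branch below appends the rank level detected
// on the play screen, while every other mode just reports its name.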
debugLog.debug(_analyzer.getMode() + " level " + _analyzer.getRankLevel()); if ("Ranked".equals(_analyzer.getMode())) { _notify(_analyzer.getMode() + " Mode Detected", "Rank Level " + _analyzer.getRankLevel()); } else { _notify(_analyzer.getMode() + " Mode Detected"); } } if ("Ranked".equals(_analyzer.getMode())) { Log.info(_analyzer.getMode() + " Mode Detected - Level " + _analyzer.getRankLevel()); } else { Log.info(_analyzer.getMode() + " Mode Detected"); } break;
case NEW_ARENA: if(_analyzer.isNewArena()) { _notify("New Arena Run Detected"); Log.info("New Arena Run Detected"); } break; // braces so the log line is guarded by isNewArena() like the notification, matching the notify/log pairing of the other cases
case OPPONENT_CLASS: _notify("Playing vs " + _analyzer.getOpponentClass()); Log.info("Playing vs " + _analyzer.getOpponentClass()); break;
case OPPONENT_NAME: _notify("Opponent: " + _analyzer.getOpponentName()); Log.info("Opponent: " + _analyzer.getOpponentName()); break;
case RESULT: _playingInMatch = false; _setCurrentMatchEnabledi(false); _notify(_analyzer.getResult() + " Detected"); Log.info(_analyzer.getResult() + " Detected"); checkMatchResult(_analyzer.getMatch()); break;
case SCREEN: boolean inGameModeScreen = (_analyzer.getScreen() == Screen.ARENA_LOBBY || _analyzer.getScreen() == Screen.ARENA_END || _analyzer.getScreen() == Screen.PLAY_LOBBY); if (inGameModeScreen) { if (_playingInMatch && _analyzer.getResult() == null) { _playingInMatch = false; _notify("Detection Error", "Match result was not detected."); Log.info("Detection Error: Match result was not detected."); checkMatchResult(_analyzer.getMatch()); } _playingInMatch = false; } if (_analyzer.getScreen() == Screen.FINDING_OPPONENT) { _resetMatchClassSelectors(); } if (_analyzer.getScreen().group == ScreenGroup.MATCH_START) { _setCurrentMatchEnabledi(true); _playingInMatch = true; } if (_analyzer.getScreen().group != ScreenGroup.MATCH_END && !DO_NOT_NOTIFY_SCREENS.contains(_analyzer.getScreen()) && Config.showScreenNotification()) { if (_analyzer.getScreen() == Screen.PRACTICE_LOBBY) { _notify(_analyzer.getScreen().title + " Screen Detected", "Results are not tracked in practice mode"); } else { _notify(_analyzer.getScreen().title + " Screen Detected"); } } if (_analyzer.getScreen() == Screen.PRACTICE_LOBBY) { Log.info(_analyzer.getScreen().title + " Screen Detected. Result tracking disabled."); } else { if (_analyzer.getScreen() == Screen.MATCH_VS) { Log.divider(); } Log.info(_analyzer.getScreen().title + " Screen Detected"); } break;
case YOUR_CLASS: _notify("Playing as " + _analyzer.getYourClass()); Log.info("Playing as " + _analyzer.getYourClass()); break;
case YOUR_TURN: if (Config.showYourTurnNotification()) { _notify((_analyzer.isYourTurn() ? "Your" : "Opponent") + " turn detected"); } Log.info((_analyzer.isYourTurn() ?
"Your" : "Opponent") + " turn detected"); break; case ERROR_ANALYSING_IMAGE: _notify("Error analysing opponent name image"); Log.info("Error analysing opponent name image"); break; default: _notify("Unhandled event"); Log.info("Unhandled event"); } _updateCurrentMatchUi(); } public LogPane getLogPane() { return _logText; } private void _handleApiEvent(Object changed) { switch(changed.toString()) { case "error": _notify("API Error", _api.getMessage()); Log.error("API Error: " + _api.getMessage()); Main.showMessageDialog(this, "API Error: " + _api.getMessage()); break; case "result": Log.info("API Result: " + _api.getMessage()); _lastMatch = _analyzer.getMatch(); _lastMatch.setId(_api.getLastMatchId()); _setCurrentMatchEnabledi(false); _updateCurrentMatchUi(); // new line after match result if(_api.getMessage().matches(".*(Edit match|Arena match successfully created).*")) { _analyzer.resetMatch(); _resetMatchClassSelectors(); Log.divider(); } break; } } private void _handleProgramHelperEvent(Object changed) { Log.info(changed.toString()); if (changed.toString().matches(".*minimized.*")) { _notify("Hearthstone Minimized", "Warning! No detection possible while minimized."); } if (changed.toString().matches(".*fullscreen.*")) { JOptionPane.showMessageDialog(this, "Hearthstats.net Uploader Warning! \n\nNo detection possible while Hearthstone is in fullscreen mode.\n\nPlease set Hearthstone to WINDOWED mode and close and RESTART Hearthstone.\n\nSorry for the inconvenience."); } if (changed.toString().matches(".*restored.*")) { _notify("Hearthstone Restored", "Resuming detection ..."); } } @Override public void update(Observable dispatcher, Object changed) { if (dispatcher.getClass().isAssignableFrom(HearthstoneAnalyser.class)) try { handleAnalyserEvent((AnalyserEvent) changed); } catch (IOException e) { Main.showErrorDialog("Error handling analyzer event", e); } if(dispatcher.getClass().isAssignableFrom(API.class)) _handleApiEvent(changed); if(dispatcher.getClass().toString().matches(".*ProgramHelper(Windows|Osx)?")) _handleProgramHelperEvent(changed); } @Override public void windowActivated(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowClosed(WindowEvent e) { // TODO Auto-generated method stub debugLog.debug("closed"); } @Override public void windowClosing(WindowEvent e) { Point p = getLocationOnScreen(); Config.setX(p.x); Config.setY(p.y); Dimension rect = getSize(); Config.setWidth((int) rect.getWidth()); Config.setHeight((int) rect.getHeight()); try { Config.save(); } catch (Throwable t) { Log.warn("Error occurred trying to write settings file, your settings may not be saved", t); } System.exit(0); } @Override public void windowDeactivated(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowDeiconified(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowIconified(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowOpened(WindowEvent e) { // TODO Auto-generated method stub } private Integer _getDeckSlotDeckId(JComboBox selector) { Integer deckId = null; String deckStr = (String) selector.getItemAt(selector.getSelectedIndex()); Pattern pattern = Pattern.compile("[^0-9]+([0-9]+)$"); Matcher matcher = pattern.matcher(deckStr); if(matcher.find()) { deckId = Integer.parseInt(matcher.group(1)); } return deckId; } private void _saveDeckSlots() { try { _api.setDeckSlots( _getDeckSlotDeckId(_deckSlot1Field), _getDeckSlotDeckId(_deckSlot2Field), 
_getDeckSlotDeckId(_deckSlot3Field), _getDeckSlotDeckId(_deckSlot4Field), _getDeckSlotDeckId(_deckSlot5Field), _getDeckSlotDeckId(_deckSlot6Field), _getDeckSlotDeckId(_deckSlot7Field), _getDeckSlotDeckId(_deckSlot8Field), _getDeckSlotDeckId(_deckSlot9Field) ); Main.showMessageDialog(this, _api.getMessage()); _updateDecksTab(); } catch (Throwable e) { Main.showErrorDialog("Error saving deck slots", e); } } private void _saveOptions() { debugLog.debug("Saving options..."); Config.setUserKey(_userKeyField.getText()); Config.setCheckForUpdates(_checkUpdatesField.isSelected()); Config.setShowNotifications(_notificationsEnabledField.isSelected()); Config.setShowHsFoundNotification(_showHsFoundField.isSelected()); Config.setShowHsClosedNotification(_showHsClosedField.isSelected()); Config.setShowScreenNotification(_showScreenNotificationField.isSelected()); Config.setShowModeNotification(_showModeNotificationField.isSelected()); Config.setShowDeckNotification(_showDeckNotificationField.isSelected()); Config.setShowYourTurnNotification(_showYourTurnNotificationField.isSelected()); Config.setShowMatchPopup(Config.MatchPopup.values()[showMatchPopupField.getSelectedIndex()]); Config.setAnalyticsEnabled(_analyticsField.isSelected()); Config.setMinToTray(_minToTrayField.isSelected()); Config.setStartMinimized(_startMinimizedField.isSelected()); if (_notificationsFormat != null) { // This control only appears on OS X machines, will be null on Windows machines Config.setUseOsxNotifications(_notificationsFormat.getSelectedIndex() == 0); _notificationQueue = Config.useOsxNotifications() ? new OsxNotificationQueue() : new DialogNotificationQueue(); } try { Config.save(); debugLog.debug("...save complete"); JOptionPane.showMessageDialog(this, "Options Saved"); } catch (Throwable e) { Log.warn("Error occurred trying to write settings file, your settings may not be saved", e); JOptionPane.showMessageDialog(null, "Error occurred trying to write settings file, your settings may not be saved"); } } private void _setCurrentMatchEnabledi(Boolean enabled){ _currentMatchEnabled = enabled; _currentYourClassSelector.setEnabled(enabled); _currentOpponentClassSelect.setEnabled(enabled); _currentGameCoinField.setEnabled(enabled); _currentOpponentNameField.setEnabled(enabled); _currentNotesField.setEnabled(enabled); } //http://stackoverflow.com/questions/7461477/how-to-hide-a-jframe-in-system-tray-of-taskbar TrayIcon trayIcon; SystemTray tray; private void _enableMinimizeToTray(){ if(SystemTray.isSupported()){ tray = SystemTray.getSystemTray(); ActionListener exitListener = new ActionListener() { public void actionPerformed(ActionEvent e) { System.exit(0); } }; PopupMenu popup = new PopupMenu(); MenuItem defaultItem = new MenuItem("Restore"); defaultItem.setFont(new Font("Arial",Font.BOLD,14)); defaultItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { setVisible(true); setExtendedState(JFrame.NORMAL); } }); popup.add(defaultItem); defaultItem = new MenuItem("Exit"); defaultItem.addActionListener(exitListener); defaultItem.setFont(new Font("Arial",Font.PLAIN,14)); popup.add(defaultItem); Image icon = new ImageIcon(getClass().getResource("/images/icon.png")).getImage(); trayIcon = new TrayIcon(icon, "HearthStats.net Uploader", popup); trayIcon.setImageAutoSize(true); trayIcon.addMouseListener(new MouseAdapter(){ public void mousePressed(MouseEvent e){ if(e.getClickCount() >= 2){ setVisible(true); setExtendedState(JFrame.NORMAL); } } }); } else { debugLog.debug("system tray not 
supported"); } addWindowStateListener(new WindowStateListener() { public void windowStateChanged(WindowEvent e) { if (Config.minimizeToTray()) { if (e.getNewState() == ICONIFIED) { try { tray.add(trayIcon); setVisible(false); } catch (AWTException ex) { } } if (e.getNewState()==7) { try{ tray.add(trayIcon); setVisible(false); } catch(AWTException ex){ } } if (e.getNewState()==MAXIMIZED_BOTH) { tray.remove(trayIcon); setVisible(true); } if (e.getNewState()==NORMAL) { tray.remove(trayIcon); setVisible(true); debugLog.debug("Tray icon removed"); } } } }); } }
uploader/src/main/java/net/hearthstats/Monitor.java
package net.hearthstats; import com.boxysystems.jgoogleanalytics.FocusPoint; import com.boxysystems.jgoogleanalytics.JGoogleAnalyticsTracker; import net.hearthstats.analysis.AnalyserEvent; import net.hearthstats.analysis.HearthstoneAnalyser; import net.hearthstats.log.Log; import net.hearthstats.log.LogPane; import net.hearthstats.notification.DialogNotificationQueue; import net.hearthstats.notification.NotificationQueue; import net.hearthstats.notification.OsxNotificationQueue; import net.hearthstats.state.Screen; import net.hearthstats.state.ScreenGroup; import net.hearthstats.ui.MatchEndPopup; import net.hearthstats.util.Rank; import net.miginfocom.swing.MigLayout; import org.json.simple.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.HyperlinkListener; import java.awt.*; import java.awt.event.*; import java.awt.image.BufferedImage; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.text.MessageFormat; import java.util.*; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; @SuppressWarnings("serial") public class Monitor extends JFrame implements Observer, WindowListener { private static final String PROFILES_URL = "http://hearthstats.net/profiles"; private static final String DECKS_URL = "http://hearthstats.net/decks"; private static final int POLLING_INTERVAL_IN_MS = 100; private static final int MAX_THREADS = 5; private static final int GC_FREQUENCY = 20; private static final EnumSet<Screen> DO_NOT_NOTIFY_SCREENS = EnumSet.of(Screen.COLLECTION, Screen.COLLECTION_ZOOM, Screen.MAIN_TODAYSQUESTS, Screen.TITLE); private static Logger debugLog = LoggerFactory.getLogger(Monitor.class); private static Logger perfLog = LoggerFactory.getLogger("net.hearthstats.performance"); public static final String[] hsClassOptions = { "- undetected -", "Druid", "Hunter", "Mage", "Paladin", "Priest", "Rogue", "Shaman", "Warlock", "Warrior" }; protected API _api = new API(); protected HearthstoneAnalyser _analyzer = new HearthstoneAnalyser(); protected ProgramHelper _hsHelper; private HyperlinkListener _hyperLinkListener = HyperLinkHandler.getInstance(); private JTextField _currentOpponentNameField; private JLabel _currentMatchLabel; private JCheckBox _currentGameCoinField; private JTextArea _currentNotesField; private JButton _lastMatchButton; private HearthstoneMatch _lastMatch; private JComboBox<String> _deckSlot1Field; private JComboBox<String> _deckSlot2Field; private JComboBox<String> _deckSlot3Field; private JComboBox<String> _deckSlot4Field; private JComboBox<String> _deckSlot5Field; private JComboBox<String> _deckSlot6Field; private JComboBox<String> _deckSlot7Field; private JComboBox<String> _deckSlot8Field; private JComboBox<String> _deckSlot9Field; private JComboBox _currentOpponentClassSelect; private JComboBox _currentYourClassSelector; private int _numThreads = 0; private int _pollIterations = 0; protected boolean _hearthstoneDetected; protected JGoogleAnalyticsTracker _analytics; protected LogPane _logText; private JScrollPane _logScroll; private JTextField _userKeyField; private JCheckBox _checkUpdatesField; private JCheckBox _notificationsEnabledField; private JComboBox _notificationsFormat; private 
JCheckBox _showHsFoundField; private JCheckBox _showHsClosedField; private JCheckBox _showScreenNotificationField; private JCheckBox _showModeNotificationField; private JCheckBox _showDeckNotificationField; private JComboBox showMatchPopupField; private JCheckBox _analyticsField; private JCheckBox _minToTrayField; private JCheckBox _startMinimizedField; private JCheckBox _showYourTurnNotificationField; private JTabbedPane _tabbedPane; private ResourceBundle _bundle = ResourceBundle.getBundle("net.hearthstats.resources.Main"); public Monitor() throws HeadlessException { switch (Config.os) { case WINDOWS: _hsHelper = new ProgramHelperWindows("Hearthstone.exe"); break; case OSX: _hsHelper = new ProgramHelperOsx("unity.Blizzard Entertainment.Hearthstone"); break; default: throw new UnsupportedOperationException(t("error.os_unsupported")); } } /** * Loads text from the main resource bundle, using the local language when available. * @param key the key for the desired string * @return The requested string */ private String t(String key) { return _bundle.getString(key); } /** * Loads text from the main resource bundle, using the local language when available, and puts the given value into the appropriate spot. * @param key the key for the desired string * @param value0 a value to place in the {0} placeholder in the string * @return The requested string */ private String t(String key, String value0) { String message = _bundle.getString(key); return MessageFormat.format(message, value0); } public void start() throws IOException { if (Config.analyticsEnabled()) { debugLog.debug("Enabling analytics"); _analytics = new JGoogleAnalyticsTracker("HearthStats.net " + t("Uploader"), Config.getVersionWithOs(), "UA-45442103-3"); _analytics.trackAsynchronously(new FocusPoint("AppStart")); } addWindowListener(this); _createAndShowGui(); _showWelcomeLog(); _checkForUpdates(); _api.addObserver(this); _analyzer.addObserver(this); _hsHelper.addObserver(this); if(_checkForUserKey()) { _pollHearthstone(); } if (Config.os == Config.OS.OSX) { Log.info(t("waiting_for_hs")); } else { Log.info(t("waiting_for_hs_windowed")); } } private void _showWelcomeLog() { debugLog.debug("Showing welcome log messages"); Log.welcome("HearthStats.net " + t("Uploader") + " v" + Config.getVersionWithOs()); Log.help(t("welcome_1_set_decks")); if (Config.os == Config.OS.OSX) { Log.help(t("welcome_2_run_hearthstone")); Log.help(t("welcome_3_notifications")); } else { Log.help(t("welcome_2_run_hearthstone_windowed")); Log.help(t("welcome_3_notifications_windowed")); } String logFileLocation = Log.getLogFileLocation(); if (logFileLocation == null) { Log.help(t("welcome_4_feedback")); } else { Log.help(t("welcome_4_feedback_with_log", logFileLocation)); } } private boolean _checkForUserKey() { if(Config.getUserKey().equals("your_userkey_here")) { Log.warn(t("error.userkey_not_entered")); bringWindowToFront(); JOptionPane.showMessageDialog(this, "HearthStats.net " + t("error.title") + ":\n\n" + t("you_need_to_enter_userkey") + "\n\n" + t("get_it_at_hsnet_profiles")); // Create Desktop object Desktop d = Desktop.getDesktop(); // Browse a URL, say google.com try { d.browse(new URI(PROFILES_URL)); } catch (IOException | URISyntaxException e) { Log.warn("Error launching browser with URL " + PROFILES_URL, e); } String[] options = {t("button.ok"), t("button.cancel")}; JPanel panel = new JPanel(); JLabel lbl = new JLabel(t("UserKey")); JTextField txt = new JTextField(10); panel.add(lbl); panel.add(txt); int selectedOption = 
JOptionPane.showOptionDialog(this, panel, t("enter_your_userkey"), JOptionPane.NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if(selectedOption == 0) { String userkey = txt.getText(); if(userkey.isEmpty()) { _checkForUserKey(); } else { Config.setUserKey(userkey); try { Config.save(); } catch (Throwable e) { Log.warn("Error occurred trying to write settings file, your settings may not be saved", e); } _userKeyField.setText(userkey); Log.info(t("UserkeyStored")); _pollHearthstone(); } } else { System.exit(0); } return false; } return true; } /** * Brings the monitor window to the front of other windows. Should only be used for important events like a * modal dialog or error that we want the user to see immediately. */ private void bringWindowToFront() { final Monitor frame = this; java.awt.EventQueue.invokeLater(new Runnable() { @Override public void run() { frame.setVisible(true); } }); } /** * Overridden version of setVisible based on http://stackoverflow.com/questions/309023/how-to-bring-a-window-to-the-front * that should ensure the window is brought to the front for important things like modal dialogs. */ @Override public void setVisible(final boolean visible) { // let's handle visibility... if (!visible || !isVisible()) { // have to check this condition simply because super.setVisible(true) invokes toFront if frame was already visible super.setVisible(visible); } // ...and bring frame to the front.. in a strange and weird way if (visible) { int state = super.getExtendedState(); state &= ~JFrame.ICONIFIED; super.setExtendedState(state); super.setAlwaysOnTop(true); super.toFront(); super.requestFocus(); super.setAlwaysOnTop(false); } } @Override public void toFront() { super.setVisible(true); int state = super.getExtendedState(); state &= ~JFrame.ICONIFIED; super.setExtendedState(state); super.setAlwaysOnTop(true); super.toFront(); super.requestFocus(); super.setAlwaysOnTop(false); } private void _createAndShowGui() { debugLog.debug("Creating GUI"); Image icon = new ImageIcon(getClass().getResource("/images/icon.png")).getImage(); setIconImage(icon); setLocation(Config.getX(), Config.getY()); setSize(Config.getWidth(), Config.getHeight()); _tabbedPane = new JTabbedPane(); add(_tabbedPane); // log _logText = new LogPane(); _logScroll = new JScrollPane (_logText, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); _tabbedPane.add(_logScroll, t("tab.log")); _tabbedPane.add(_createMatchUi(), t("tab.current_match")); _tabbedPane.add(_createDecksUi(), t("tab.decks")); _tabbedPane.add(_createOptionsUi(), t("tab.options")); _tabbedPane.add(_createAboutUi(), t("tab.about")); _tabbedPane.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { if (_tabbedPane.getSelectedIndex() == 2) try { _updateDecksTab(); } catch (IOException e1) { _notify(t("error.loading_decks.title"), t("error.loading_decks")); Log.warn(t("error.loading_decks"), e1); } } }); _updateCurrentMatchUi(); _enableMinimizeToTray(); setMinimumSize(new Dimension(500, 600)); setVisible(true); if(Config.startMinimized()) setState(JFrame.ICONIFIED); _updateTitle(); } private JScrollPane _createAboutUi() { JPanel panel = new JPanel(); panel.setMaximumSize(new Dimension(100,100)); panel.setBackground(Color.WHITE); MigLayout layout = new MigLayout(""); panel.setLayout(layout); JEditorPane text = new JEditorPane(); text.setContentType("text/html"); text.setEditable(false); text.setBackground(Color.WHITE); text.setText("<html><body style=\"font-family:'Helvetica 
Neue', Helvetica, Arial, sans-serif; font-size:10px;\">" + "<h2 style=\"font-weight:normal\"><a href=\"http://hearthstats.net\">HearthStats.net</a> " + t("Uploader") + " v" + Config.getVersion() + "</h2>" + "<p><strong>" + t("Author") + ":</strong> " + "Jerome Dane (<a href=\"https://plus.google.com/+JeromeDane\">Google+</a>, <a href=\"http://twitter.com/JeromeDane\">Twitter</a>), " + "Charles Gutjahr (<a href=\"http://charlesgutjahr.com\">Website</a>)</p>" + "<p>" + t("about.utility_l1") + "<br>" + t("about.utility_l2") + "<br>" + t("about.utility_l3") + "</p>" + "<p>" + t("about.open_source_l1") + "<br>" + t("about.open_source_l2") + "</p>" + "<p>&bull; <a href=\"https://github.com/HearthStats/HearthStats.net-Uploader/\">" + t("about.project_source") + "</a><br/>" + "&bull; <a href=\"https://github.com/HearthStats/HearthStats.net-Uploader/releases\">" + t("about.releases_and_changelog") + "</a><br/>" + "&bull; <a href=\"https://github.com/HearthStats/HearthStats.net-Uploader/issues\">" + t("about.feedback_and_suggestions") + "</a><br/>" + "&bull; <a href=\"http://redd.it/1wa4rc/\">Reddit thread</a> (please up-vote)</p>" + "<p><strong>" + t("about.support_project") + ":</strong></p>" + "</body></html>" ); text.addHyperlinkListener(_hyperLinkListener); panel.add(text, "wrap"); JButton donateButton = new JButton("<html><img style=\"border-style: none;\" src=\"" + getClass().getResource("/images/donate.gif") + "\"/></html>"); donateButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // Create Desktop object Desktop d = Desktop.getDesktop(); // Browse a URL, say google.com try { d.browse(new URI("https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=UJFTUHZF6WPDS")); } catch (Throwable e1) { Main.showErrorDialog("Error launching browser with donation URL", e1); } } }); donateButton.setCursor(new Cursor(Cursor.HAND_CURSOR)); panel.add(donateButton, "wrap"); JEditorPane contribtorsText = new JEditorPane(); contribtorsText.setContentType("text/html"); contribtorsText.setEditable(false); contribtorsText.setBackground(Color.WHITE); contribtorsText.setText("<html><body style=\"font-family:arial,sans-serif; font-size:10px;\">" + "<p><strong>Contributors</strong> (listed alphabetically):</p>" + "<p>" + "&bull; <a href=\"https://github.com/JeromeDane\">Jerome Dane</a> - Original developer<br>" + "&bull; <a href=\"https://github.com/gtch\">Charles Gutjahr</a> - Added OS X support and new screen detection<br>" + "&bull; <a href=\"https://github.com/sargonas\">J Eckert</a> - Fixed notifications spawning taskbar icons<br>" + "&bull; <a href=\"https://github.com/nwalsh1995\">nwalsh1995</a> - Started turn detection development<br>" + "&bull; <a href=\"https://github.com/remcoros\">Remco Ros</a> (<a href=\"http://hearthstonetracker.com/\">HearthstoneTracker</a>) - Provides advice & suggestins<br>" + "&bull; <a href=\"https://github.com/RoiyS\">RoiyS</a> - Added option to disable all notifications<br>" + "</p>"+ "</body></html>" ); contribtorsText.addHyperlinkListener(_hyperLinkListener); panel.add(contribtorsText, "wrap"); return new JScrollPane(panel, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); } private JPanel _createMatchUi() { JPanel panel = new JPanel(); MigLayout layout = new MigLayout(); panel.setLayout(layout); // match label panel.add(new JLabel(" "), "wrap"); _currentMatchLabel = new JLabel(); panel.add(_currentMatchLabel, "skip,span,wrap"); panel.add(new JLabel(" "), "wrap"); String[] 
localizedClassOptions = new String[hsClassOptions.length]; localizedClassOptions[0] = "- " + t("undetected") + " -"; for(int i = 1; i < localizedClassOptions.length; i++) localizedClassOptions[i] = t(hsClassOptions[i]); // your class panel.add(new JLabel(t("match.label.your_class") + " "), "skip,right"); _currentYourClassSelector = new JComboBox<>(localizedClassOptions); panel.add(_currentYourClassSelector, "wrap"); // opponent class panel.add(new JLabel(t("match.label.opponents_class") + " "), "skip,right"); _currentOpponentClassSelect = new JComboBox<>(localizedClassOptions); panel.add(_currentOpponentClassSelect, "wrap"); // Opponent name panel.add(new JLabel("Opponent's Name: "), "skip,right"); _currentOpponentNameField = new JTextField(); _currentOpponentNameField.setMinimumSize(new Dimension(100, 1)); _currentOpponentNameField.addKeyListener(new KeyAdapter() { public void keyReleased(KeyEvent e) { _analyzer.getMatch().setOpponentName(_currentOpponentNameField.getText().replaceAll("(\r\n|\n)", "<br/>")); } }); panel.add(_currentOpponentNameField, "wrap"); // coin panel.add(new JLabel(t("match.label.coin") + " "), "skip,right"); _currentGameCoinField = new JCheckBox(t("match.coin")); _currentGameCoinField.setSelected(Config.showHsClosedNotification()); _currentGameCoinField.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { _analyzer.getMatch().setCoin(_currentGameCoinField.isSelected()); } }); panel.add(_currentGameCoinField, "wrap"); // notes panel.add(new JLabel(t("match.label.notes") + " "), "skip,wrap"); _currentNotesField = new JTextArea(); _currentNotesField.setBorder(BorderFactory.createCompoundBorder( BorderFactory.createMatteBorder(1, 1, 1, 1, Color.black), BorderFactory.createEmptyBorder(3, 6, 3, 6))); _currentNotesField.setMinimumSize(new Dimension(350, 150)); _currentNotesField.setBackground(Color.WHITE); _currentNotesField.addKeyListener(new KeyAdapter() { public void keyReleased(KeyEvent e) { _analyzer.getMatch().setNotes(_currentNotesField.getText()); } }); panel.add(_currentNotesField, "skip,span"); panel.add(new JLabel(" "), "wrap"); // last match panel.add(new JLabel(t("match.label.previous_match") + " "), "skip,wrap"); _lastMatchButton = new JButton("[n/a]"); _lastMatchButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent arg0) { String url = "Arena".equals(_lastMatch.getMode()) ? 
"http://hearthstats.net/arenas/new" : _lastMatch.getEditUrl(); try { Desktop.getDesktop().browse(new URI(url)); } catch (Throwable e) { Main.showErrorDialog("Error launching browser with URL " + url, e); } } }); _lastMatchButton.setEnabled(false); panel.add(_lastMatchButton, "skip,wrap,span"); return panel; } private JPanel _createDecksUi() { JPanel panel = new JPanel(); MigLayout layout = new MigLayout(); panel.setLayout(layout); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("set_your_deck_slots")), "skip,span,wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("deck_slot.label_1")), "skip"); panel.add(new JLabel(t("deck_slot.label_2")), ""); panel.add(new JLabel(t("deck_slot.label_3")), "wrap"); _deckSlot1Field = new JComboBox<>(); panel.add(_deckSlot1Field, "skip"); _deckSlot2Field = new JComboBox<>(); panel.add(_deckSlot2Field, ""); _deckSlot3Field = new JComboBox<>(); panel.add(_deckSlot3Field, "wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("deck_slot.label_4")), "skip"); panel.add(new JLabel(t("deck_slot.label_5")), ""); panel.add(new JLabel(t("deck_slot.label_6")), "wrap"); _deckSlot4Field = new JComboBox<>(); panel.add(_deckSlot4Field, "skip"); _deckSlot5Field = new JComboBox<>(); panel.add(_deckSlot5Field, ""); _deckSlot6Field = new JComboBox<>(); panel.add(_deckSlot6Field, "wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(t("deck_slot.label_7")), "skip"); panel.add(new JLabel(t("deck_slot.label_8")), ""); panel.add(new JLabel(t("deck_slot.label_9")), "wrap"); _deckSlot7Field = new JComboBox<>(); panel.add(_deckSlot7Field, "skip"); _deckSlot8Field = new JComboBox<>(); panel.add(_deckSlot8Field, ""); _deckSlot9Field = new JComboBox<>(); panel.add(_deckSlot9Field, "wrap"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(" "), "wrap"); JButton saveButton = new JButton(t("button.save_deck_slots")); saveButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { _saveDeckSlots(); } }); panel.add(saveButton, "skip"); JButton refreshButton = new JButton(t("button.refresh")); refreshButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { try { _updateDecksTab(); } catch (IOException e1) { Main.showErrorDialog("Error updating decks", e1); } } }); panel.add(refreshButton, "wrap,span"); panel.add(new JLabel(" "), "wrap"); panel.add(new JLabel(" "), "wrap"); JButton myDecksButton = new JButton(t("manage_decks_on_hsnet")); myDecksButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { try { Desktop.getDesktop().browse(new URI(DECKS_URL)); } catch (Throwable e1) { Main.showErrorDialog("Error launching browser with URL" + DECKS_URL, e1); } } }); panel.add(myDecksButton, "skip,span"); return panel; } private JPanel _createOptionsUi() { JPanel panel = new JPanel(); MigLayout layout = new MigLayout(); panel.setLayout(layout); panel.add(new JLabel(" "), "wrap"); // user key panel.add(new JLabel(t("options.label.userkey") + " "), "skip,right"); _userKeyField = new JTextField(); _userKeyField.setText(Config.getUserKey()); panel.add(_userKeyField, "wrap"); // check for updates panel.add(new JLabel(t("options.label.updates") + " "), "skip,right"); _checkUpdatesField = new JCheckBox(t("options.check_updates")); _checkUpdatesField.setSelected(Config.checkForUpdates()); panel.add(_checkUpdatesField, "wrap"); // show notifications panel.add(new JLabel(t("options.label.notifications") + " "), 
"skip,right"); _notificationsEnabledField = new JCheckBox("Show notifications"); _notificationsEnabledField.setSelected(Config.showNotifications()); _notificationsEnabledField.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { _updateNotificationCheckboxes(); } }); panel.add(_notificationsEnabledField, "wrap"); // When running on Mac OS X 10.8 or later, the format of the notifications can be changed if (Config.isOsxNotificationsSupported()) { panel.add(new JLabel(""), "skip,right"); JLabel notificationsFormatLabel = new JLabel(t("options.label.notifyformat.label")); panel.add(notificationsFormatLabel, "split 2, gapleft 27"); _notificationsFormat = new JComboBox<>(new String[]{ t("options.label.notifyformat.osx"), t("options.label.notifyformat.hearthstats")}); _notificationsFormat.setSelectedIndex(Config.useOsxNotifications() ? 0 : 1); panel.add(_notificationsFormat, "wrap"); } // show HS found notification panel.add(new JLabel(""), "skip,right"); _showHsFoundField = new JCheckBox(t("options.notification.hs_found")); _showHsFoundField.setSelected(Config.showHsFoundNotification()); panel.add(_showHsFoundField, "wrap"); // show HS closed notification panel.add(new JLabel(""), "skip,right"); _showHsClosedField = new JCheckBox(t("options.notification.hs_closed")); _showHsClosedField.setSelected(Config.showHsClosedNotification()); panel.add(_showHsClosedField, "wrap"); // show game screen notification panel.add(new JLabel(""), "skip,right"); _showScreenNotificationField = new JCheckBox(t("options.notification.screen")); _showScreenNotificationField.setSelected(Config.showScreenNotification()); panel.add(_showScreenNotificationField, "wrap"); // show game mode notification panel.add(new JLabel(""), "skip,right"); _showModeNotificationField = new JCheckBox(t("options.notification.mode")); _showModeNotificationField.setSelected(Config.showModeNotification()); panel.add(_showModeNotificationField, "wrap"); // show deck notification panel.add(new JLabel(""), "skip,right"); _showDeckNotificationField = new JCheckBox(t("options.notification.deck")); _showDeckNotificationField.setSelected(Config.showDeckNotification()); panel.add(_showDeckNotificationField, "wrap"); // show your turn notification panel.add(new JLabel(""), "skip,right"); _showYourTurnNotificationField = new JCheckBox(t("options.notification.turn")); _showYourTurnNotificationField.setSelected(Config.showYourTurnNotification()); panel.add(_showYourTurnNotificationField, "wrap"); _updateNotificationCheckboxes(); panel.add(new JLabel(t("options.label.matchpopup")), "skip,right"); showMatchPopupField = new JComboBox<>(new String[]{ t("options.label.matchpopup.always"), t("options.label.matchpopup.incomplete"), t("options.label.matchpopup.never")}); showMatchPopupField.setSelectedIndex(Config.showMatchPopup().ordinal()); panel.add(showMatchPopupField, "wrap"); // minimize to tray panel.add(new JLabel("Interface: "), "skip,right"); _minToTrayField = new JCheckBox(t("options.notification.min_to_tray")); _minToTrayField.setSelected(Config.checkForUpdates()); panel.add(_minToTrayField, "wrap"); // start minimized panel.add(new JLabel(""), "skip,right"); _startMinimizedField = new JCheckBox(t("options.notification.start_min")); _startMinimizedField.setSelected(Config.startMinimized()); panel.add(_startMinimizedField, "wrap"); // analytics panel.add(new JLabel("Analytics: "), "skip,right"); _analyticsField = new JCheckBox(t("options.submit_stats")); final Monitor frame = this; 
_analyticsField.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if(!_analyticsField.isSelected()) { int dialogResult = JOptionPane.showConfirmDialog(frame, "A lot of work has gone into this uploader.\n" + "It is provided for free, and all we ask in return\n" + "is that you let us track basic, anonymous statistics\n" + "about how frequently it is being used." + "\n\nAre you sure you want to disable analytics?" , "Please reconsider ...", JOptionPane.YES_NO_OPTION); if (dialogResult == JOptionPane.NO_OPTION){ _analyticsField.setSelected(true); } } } }); _analyticsField.setSelected(Config.analyticsEnabled()); panel.add(_analyticsField, "wrap"); // Save button panel.add(new JLabel(""), "skip,right"); JButton saveOptionsButton = new JButton(t("button.save_options")); saveOptionsButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { _saveOptions(); } }); panel.add(saveOptionsButton, "wrap"); return panel; } private void _updateNotificationCheckboxes() { boolean isEnabled = _notificationsEnabledField.isSelected(); if (_notificationsFormat != null) { _notificationsFormat.setEnabled(isEnabled); } _showHsFoundField.setEnabled(isEnabled); _showHsClosedField.setEnabled(isEnabled); _showScreenNotificationField.setEnabled(isEnabled); _showModeNotificationField.setEnabled(isEnabled); _showDeckNotificationField.setEnabled(isEnabled); } private void _applyDecksToSelector(JComboBox<String> selector, Integer slotNum) { selector.setMaximumSize(new Dimension(145, selector.getSize().height)); selector.removeAllItems(); selector.addItem("- Select a deck -"); List<JSONObject> decks = DeckSlotUtils.getDecks(); for(int i = 0; i < decks.size(); i++) { selector.addItem(decks.get(i).get("name") + " #" + decks.get(i).get("id")); if(decks.get(i).get("slot") != null && decks.get(i).get("slot").toString().equals(slotNum.toString())) selector.setSelectedIndex(i + 1); } } private void _updateDecksTab() throws IOException { DeckSlotUtils.updateDecks(); _applyDecksToSelector(_deckSlot1Field, 1); _applyDecksToSelector(_deckSlot2Field, 2); _applyDecksToSelector(_deckSlot3Field, 3); _applyDecksToSelector(_deckSlot4Field, 4); _applyDecksToSelector(_deckSlot5Field, 5); _applyDecksToSelector(_deckSlot6Field, 6); _applyDecksToSelector(_deckSlot7Field, 7); _applyDecksToSelector(_deckSlot8Field, 8); _applyDecksToSelector(_deckSlot9Field, 9); } private void _checkForUpdates() { if(Config.checkForUpdates()) { Log.info(t("checking_for_updates...")); try { String availableVersion = Updater.getAvailableVersion(); if (availableVersion != null) { Log.info(t("latest_v_available") + " " + availableVersion); if (!availableVersion.matches(Config.getVersion())) { bringWindowToFront(); int dialogButton = JOptionPane.YES_NO_OPTION; int dialogResult = JOptionPane.showConfirmDialog(this, "A new version of this uploader is available\n\n" + Updater.getRecentChanges() + "\n\n" + t("would_u_like_to_install_update") , "HearthStats.net " + t("uploader_updates_avail"), dialogButton); if (dialogResult == JOptionPane.YES_OPTION){ /* // Create Desktop object Desktop d = Desktop.getDesktop(); // Browse a URL, say google.com d.browse(new URI("https://github.com/JeromeDane/HearthStats.net-Uploader/releases")); System.exit(0); */ Updater.run(); } else { dialogResult = JOptionPane.showConfirmDialog(null, t("would_you_like_to_disable_updates"), t("disable_update_checking"), dialogButton); if(dialogResult == JOptionPane.YES_OPTION){ String[] options = { t("button.ok") }; 
JPanel panel = new JPanel(); JLabel lbl = new JLabel(t("reenable_updates_any_time")); panel.add(lbl); JOptionPane.showOptionDialog(this, panel, t("updates_disabled_msg"), JOptionPane.NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options , options[0]); Config.setCheckForUpdates(false); } } } } else { Log.warn("Unable to determine latest available version"); } } catch (Throwable e) { e.printStackTrace(System.err); _notify("Update Checking Error", "Unable to determine the latest available version"); } } } protected ScheduledExecutorService scheduledExecutorService = Executors.newScheduledThreadPool(MAX_THREADS); protected boolean _drawPaneAdded = false; protected BufferedImage image; protected JPanel _drawPane = new JPanel() { @Override protected void paintComponent(Graphics g) { super.paintComponent(g); g.drawImage(image, 0, 0, null); } }; protected NotificationQueue _notificationQueue = Config.useOsxNotifications() ? new OsxNotificationQueue() : new DialogNotificationQueue(); private Boolean _currentMatchEnabled = false; private boolean _playingInMatch = false; protected void _notify(String header) { _notify(header, ""); } protected void _notify(String header, String message) { if (!Config.showNotifications()) return; //Notifications disabled _notificationQueue.add(header, message, false); } protected void _updateTitle() { String title = "HearthStats.net Uploader"; if (_hearthstoneDetected) { if (_analyzer.getScreen() != null) { title += " - " + _analyzer.getScreen().title; // if (_analyzer.getScreen() == "Play" && _analyzer.getMode() != null) { if (_analyzer.getScreen() == Screen.PLAY_LOBBY && _analyzer.getMode() != null) { title += " " + _analyzer.getMode(); } if (_analyzer.getScreen() == Screen.FINDING_OPPONENT) { if (_analyzer.getMode() != null) { title += " for " + _analyzer.getMode() + " Game"; } } // TODO: replace with enum values if ("Match Start".equals(_analyzer.getScreen().title) || "Playing".equals(_analyzer.getScreen().title)) { title += " " + (_analyzer.getMode() == null ? "[undetected]" : _analyzer.getMode()); title += " " + (_analyzer.getCoin() ? "" : "No ") + "Coin"; title += " " + (_analyzer.getYourClass() == null ? "[undetected]" : _analyzer.getYourClass()); title += " VS. " + (_analyzer.getOpponentClass() == null ? "[undetected]" : _analyzer.getOpponentClass()); } } } else { title += " - Waiting for Hearthstone "; } setTitle(title); } private int _getClassOptionIndex(String cName) { for (int i = 0; i < hsClassOptions.length; i++) { if (hsClassOptions[i].equals(cName)) { return i; } } return 0; } private void _updateCurrentMatchUi() { HearthstoneMatch match = _analyzer.getMatch(); _updateMatchClassSelectorsIfSet(match); if(_currentMatchEnabled) _currentMatchLabel.setText(match.getMode() + " Match - " + " Turn " + match.getNumTurns()); else _currentMatchLabel.setText("Waiting for next match to start ..."); _currentOpponentNameField.setText(match.getOpponentName()); _currentOpponentClassSelect.setSelectedIndex(_getClassOptionIndex(match.getOpponentClass())); _currentYourClassSelector.setSelectedIndex(_getClassOptionIndex(match.getUserClass())); _currentGameCoinField.setSelected(match.hasCoin()); _currentNotesField.setText(match.getNotes()); // last match if(_lastMatch != null && _lastMatch.getMode() != null) { if(_lastMatch.getResult() != null) { String tooltip = (_lastMatch.getMode().equals("Arena") ? 
"View current arena run on" : "Edit the previous match") + " on HearthStats.net"; _lastMatchButton.setToolTipText(tooltip); _lastMatchButton.setText(_lastMatch.toString()); _lastMatchButton.setEnabled(true); } } } private void _updateImageFrame() { if (!_drawPaneAdded) { add(_drawPane); } if (image.getWidth() >= 1024) { setSize(image.getWidth(), image.getHeight()); } _drawPane.repaint(); invalidate(); validate(); repaint(); } private void _submitMatchResult(HearthstoneMatch hsMatch) throws IOException { // check for new arena run if ("Arena".equals(hsMatch.getMode()) && _analyzer.isNewArena()) { ArenaRun run = new ArenaRun(); run.setUserClass(hsMatch.getUserClass()); Log.info("Creating new " + run.getUserClass() + "arena run"); _notify("Creating new " + run.getUserClass() + "arena run"); _api.createArenaRun(run); _analyzer.setIsNewArena(false); } String header = "Submitting match result"; String message = hsMatch.toString(); _notify(header, message); Log.matchResult(header + ": " + message); if(Config.analyticsEnabled()) { _analytics.trackAsynchronously(new FocusPoint("Submit" + hsMatch.getMode() + "Match")); } _api.createMatch(hsMatch); } private void _resetMatchClassSelectors() { _currentYourClassSelector.setSelectedIndex(0); _currentOpponentClassSelect.setSelectedIndex(0); } private void _updateMatchClassSelectorsIfSet(HearthstoneMatch hsMatch) { if (_currentYourClassSelector.getSelectedIndex() > 0) { hsMatch.setUserClass(hsClassOptions[_currentYourClassSelector.getSelectedIndex()]); } if (_currentOpponentClassSelect.getSelectedIndex() > 0) { hsMatch.setOpponentClass(hsClassOptions[_currentOpponentClassSelect.getSelectedIndex()]); } } protected void _handleHearthstoneFound(int currentPollIteration) { debugLog.debug(" - Iteration {} found Hearthstone", currentPollIteration); // mark hearthstone found if necessary if (!_hearthstoneDetected) { _hearthstoneDetected = true; debugLog.debug(" - Iteration {} changed hearthstoneDetected to true", currentPollIteration); if (Config.showHsFoundNotification()) { _notify("Hearthstone found"); } } // grab the image from Hearthstone debugLog.debug(" - Iteration {} screen capture", currentPollIteration); image = _hsHelper.getScreenCapture(); if (image == null) { debugLog.debug(" - Iteration {} screen capture returned null", currentPollIteration); } else { // detect image stats if (image.getWidth() >= 1024) { debugLog.debug(" - Iteration {} analysing image", currentPollIteration); _analyzer.analyze(image); } if (Config.mirrorGameImage()) { debugLog.debug(" - Iteration {} mirroring image", currentPollIteration); _updateImageFrame(); } } } protected void _handleHearthstoneNotFound(int currentPollIteration) { // mark hearthstone not found if necessary if (_hearthstoneDetected) { _hearthstoneDetected = false; debugLog.debug(" - Iteration {} changed hearthstoneDetected to false", currentPollIteration); if (Config.showHsClosedNotification()) { _notify("Hearthstone closed"); _analyzer.reset(); } } } protected void _pollHearthstone() { scheduledExecutorService.schedule(new Callable<Object>() { public Object call() throws Exception { _numThreads++; _pollIterations++; // A copy of pollIterations is kept in localPollIterations int currentPollIteration = _pollIterations; try { debugLog.debug("--> Iteration {} started", currentPollIteration); if (_hsHelper.foundProgram()) { _handleHearthstoneFound(currentPollIteration); } else { debugLog.debug(" - Iteration {} did not find Hearthstone", currentPollIteration); _handleHearthstoneNotFound(currentPollIteration); } 
_updateTitle(); _pollHearthstone(); // repeat the process // Keep memory usage down by telling the JVM to perform a garbage collection after every eighth poll (ie GC 1-2 times per second) if (_pollIterations % GC_FREQUENCY == 0 && Runtime.getRuntime().totalMemory() > 150000000) { debugLog.debug(" - Iteration {} triggers GC", currentPollIteration); System.gc(); } _numThreads--; } catch (Throwable ex) { debugLog.error(" - Iteration " + currentPollIteration + " caused exception which is being ignored:", ex); } finally { debugLog.debug("<-- Iteration {} finished", currentPollIteration); } return ""; } }, POLLING_INTERVAL_IN_MS, TimeUnit.MILLISECONDS); } /** * Checks whether the match result is complete, showing a popup if necessary to fix the match data, * and then submits the match when ready. * * @param match The match to check and submit. */ private void checkMatchResult(final HearthstoneMatch match) { _updateMatchClassSelectorsIfSet(match); final Config.MatchPopup matchPopup = Config.showMatchPopup(); final boolean showPopup; switch (matchPopup) { case ALWAYS: showPopup = true; break; case INCOMPLETE: showPopup = !match.isDataComplete(); break; case NEVER: showPopup = false; break; default: throw new UnsupportedOperationException("Unknown config option " + Config.showMatchPopup()); } if (showPopup) { // Show a popup allowing the user to edit their match before submitting final Monitor monitor = this; SwingUtilities.invokeLater(new Runnable() { @Override public void run() { try { boolean matchHasValidationErrors = !match.isDataComplete(); String infoMessage = null; do { if (infoMessage == null) { infoMessage = (matchPopup == Config.MatchPopup.INCOMPLETE) ? "Some match information couldn't be detected.<br>Please update these details then click Submit to submit the match to HearthStats:" : "The end of the match has been detected.<br>Please check these details then submit the match to HearthStats:"; } bringWindowToFront(); MatchEndPopup.Button buttonPressed = MatchEndPopup.showPopup(monitor, match, infoMessage); matchHasValidationErrors = !match.isDataComplete(); switch (buttonPressed) { case SUBMIT: if (matchHasValidationErrors) { infoMessage = "Some match information is incomplete.<br>Please update these details then click Submit to submit the match to HearthStats:"; } else { _submitMatchResult(match); } break; case CANCEL: return; } } while (matchHasValidationErrors); } catch (IOException e) { Main.showErrorDialog("Error submitting match result", e); } } }); } else { // Don't show a popup, submit the match directly try { _submitMatchResult(match); } catch (IOException e) { Main.showErrorDialog("Error submitting match result", e); } } } private void handleAnalyserEvent(AnalyserEvent changed) throws IOException { switch(changed) { case ARENA_END: _notify("End of Arena Run Detected"); Log.info("End of Arena Run Detected"); _api.endCurrentArenaRun(); break; case COIN: _notify("Coin Detected"); Log.info("Coin Detected"); break; case DECK_SLOT: JSONObject deck = DeckSlotUtils.getDeckFromSlot(_analyzer.getDeckSlot()); if (deck == null) { _tabbedPane.setSelectedIndex(2); bringWindowToFront(); Main.showMessageDialog(this, "Unable to determine what deck you have in slot #" + _analyzer.getDeckSlot() + "\n\nPlease set your decks in the \"Decks\" tab."); } else { _notify("Deck Detected", deck.get("name").toString()); Log.info("Deck Detected: " + deck.get("name") + " Detected"); } break; case MODE: _playingInMatch = false; _setCurrentMatchEnabledi(false); if (Config.showModeNotification()) { 
debugLog.debug(_analyzer.getMode() + " level " + _analyzer.getRankLevel()); if ("Ranked".equals(_analyzer.getMode())) { _notify(_analyzer.getMode() + " Mode Detected", "Rank Level " + _analyzer.getRankLevel()); } else { _notify(_analyzer.getMode() + " Mode Detected"); } } if ("Ranked".equals(_analyzer.getMode())) { Log.info(_analyzer.getMode() + " Mode Detected - Level " + _analyzer.getRankLevel()); } else { Log.info(_analyzer.getMode() + " Mode Detected"); } break; case NEW_ARENA: if(_analyzer.isNewArena()) _notify("New Arena Run Detected"); Log.info("New Arena Run Detected"); break; case OPPONENT_CLASS: _notify("Playing vs " + _analyzer.getOpponentClass()); Log.info("Playing vs " + _analyzer.getOpponentClass()); break; case OPPONENT_NAME: _notify("Opponent: " + _analyzer.getOpponentName()); Log.info("Opponent: " + _analyzer.getOpponentName()); break; case RESULT: _playingInMatch = false; _setCurrentMatchEnabledi(false); _notify(_analyzer.getResult() + " Detected"); Log.info(_analyzer.getResult() + " Detected"); checkMatchResult(_analyzer.getMatch()); break; case SCREEN: boolean inGameModeScreen = (_analyzer.getScreen() == Screen.ARENA_LOBBY || _analyzer.getScreen() == Screen.ARENA_END || _analyzer.getScreen() == Screen.PLAY_LOBBY); if (inGameModeScreen) { if (_playingInMatch && _analyzer.getResult() == null) { _playingInMatch = false; _notify("Detection Error", "Match result was not detected."); Log.info("Detection Error: Match result was not detected."); checkMatchResult(_analyzer.getMatch()); } _playingInMatch = false; } if (_analyzer.getScreen() == Screen.FINDING_OPPONENT) { _resetMatchClassSelectors(); } if (_analyzer.getScreen().group == ScreenGroup.MATCH_START) { _setCurrentMatchEnabledi(true); _playingInMatch = true; } if (_analyzer.getScreen().group != ScreenGroup.MATCH_END && !DO_NOT_NOTIFY_SCREENS.contains(_analyzer.getScreen()) && Config.showScreenNotification()) { if (_analyzer.getScreen() == Screen.PRACTICE_LOBBY) { _notify(_analyzer.getScreen().title + " Screen Detected", "Results are not tracked in practice mode"); } else { _notify(_analyzer.getScreen().title + " Screen Detected"); } } if (_analyzer.getScreen() == Screen.PRACTICE_LOBBY) { Log.info(_analyzer.getScreen().title + " Screen Detected. Result tracking disabled."); } else { if (_analyzer.getScreen() == Screen.MATCH_VS) { Log.divider(); } Log.info(_analyzer.getScreen().title + " Screen Detected"); } break; case YOUR_CLASS: _notify("Playing as " + _analyzer.getYourClass()); Log.info("Playing as " + _analyzer.getYourClass()); break; case YOUR_TURN: if (Config.showYourTurnNotification()) { _notify((_analyzer.isYourTurn() ? "Your" : "Opponent") + " turn detected"); } Log.info((_analyzer.isYourTurn() ? 
"Your" : "Opponent") + " turn detected"); break; case ERROR_ANALYSING_IMAGE: _notify("Error analysing opponent name image"); Log.info("Error analysing opponent name image"); break; default: _notify("Unhandled event"); Log.info("Unhandled event"); } _updateCurrentMatchUi(); } public LogPane getLogPane() { return _logText; } private void _handleApiEvent(Object changed) { switch(changed.toString()) { case "error": _notify("API Error", _api.getMessage()); Log.error("API Error: " + _api.getMessage()); Main.showMessageDialog(this, "API Error: " + _api.getMessage()); break; case "result": Log.info("API Result: " + _api.getMessage()); _lastMatch = _analyzer.getMatch(); _lastMatch.setId(_api.getLastMatchId()); _setCurrentMatchEnabledi(false); _updateCurrentMatchUi(); // new line after match result if(_api.getMessage().matches(".*(Edit match|Arena match successfully created).*")) { _analyzer.resetMatch(); _resetMatchClassSelectors(); Log.divider(); } break; } } private void _handleProgramHelperEvent(Object changed) { Log.info(changed.toString()); if (changed.toString().matches(".*minimized.*")) { _notify("Hearthstone Minimized", "Warning! No detection possible while minimized."); } if (changed.toString().matches(".*fullscreen.*")) { JOptionPane.showMessageDialog(this, "Hearthstats.net Uploader Warning! \n\nNo detection possible while Hearthstone is in fullscreen mode.\n\nPlease set Hearthstone to WINDOWED mode and close and RESTART Hearthstone.\n\nSorry for the inconvenience."); } if (changed.toString().matches(".*restored.*")) { _notify("Hearthstone Restored", "Resuming detection ..."); } } @Override public void update(Observable dispatcher, Object changed) { if (dispatcher.getClass().isAssignableFrom(HearthstoneAnalyser.class)) try { handleAnalyserEvent((AnalyserEvent) changed); } catch (IOException e) { Main.showErrorDialog("Error handling analyzer event", e); } if(dispatcher.getClass().isAssignableFrom(API.class)) _handleApiEvent(changed); if(dispatcher.getClass().toString().matches(".*ProgramHelper(Windows|Osx)?")) _handleProgramHelperEvent(changed); } @Override public void windowActivated(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowClosed(WindowEvent e) { // TODO Auto-generated method stub debugLog.debug("closed"); } @Override public void windowClosing(WindowEvent e) { Point p = getLocationOnScreen(); Config.setX(p.x); Config.setY(p.y); Dimension rect = getSize(); Config.setWidth((int) rect.getWidth()); Config.setHeight((int) rect.getHeight()); try { Config.save(); } catch (Throwable t) { Log.warn("Error occurred trying to write settings file, your settings may not be saved", t); } System.exit(0); } @Override public void windowDeactivated(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowDeiconified(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowIconified(WindowEvent e) { // TODO Auto-generated method stub } @Override public void windowOpened(WindowEvent e) { // TODO Auto-generated method stub } private Integer _getDeckSlotDeckId(JComboBox selector) { Integer deckId = null; String deckStr = (String) selector.getItemAt(selector.getSelectedIndex()); Pattern pattern = Pattern.compile("[^0-9]+([0-9]+)$"); Matcher matcher = pattern.matcher(deckStr); if(matcher.find()) { deckId = Integer.parseInt(matcher.group(1)); } return deckId; } private void _saveDeckSlots() { try { _api.setDeckSlots( _getDeckSlotDeckId(_deckSlot1Field), _getDeckSlotDeckId(_deckSlot2Field), 
_getDeckSlotDeckId(_deckSlot3Field), _getDeckSlotDeckId(_deckSlot4Field), _getDeckSlotDeckId(_deckSlot5Field), _getDeckSlotDeckId(_deckSlot6Field), _getDeckSlotDeckId(_deckSlot7Field), _getDeckSlotDeckId(_deckSlot8Field), _getDeckSlotDeckId(_deckSlot9Field) ); Main.showMessageDialog(this, _api.getMessage()); _updateDecksTab(); } catch (Throwable e) { Main.showErrorDialog("Error saving deck slots", e); } } private void _saveOptions() { debugLog.debug("Saving options..."); Config.setUserKey(_userKeyField.getText()); Config.setCheckForUpdates(_checkUpdatesField.isSelected()); Config.setShowNotifications(_notificationsEnabledField.isSelected()); Config.setShowHsFoundNotification(_showHsFoundField.isSelected()); Config.setShowHsClosedNotification(_showHsClosedField.isSelected()); Config.setShowScreenNotification(_showScreenNotificationField.isSelected()); Config.setShowModeNotification(_showModeNotificationField.isSelected()); Config.setShowDeckNotification(_showDeckNotificationField.isSelected()); Config.setShowYourTurnNotification(_showYourTurnNotificationField.isSelected()); Config.setShowMatchPopup(Config.MatchPopup.values()[showMatchPopupField.getSelectedIndex()]); Config.setAnalyticsEnabled(_analyticsField.isSelected()); Config.setMinToTray(_minToTrayField.isSelected()); Config.setStartMinimized(_startMinimizedField.isSelected()); if (_notificationsFormat != null) { // This control only appears on OS X machines, will be null on Windows machines Config.setUseOsxNotifications(_notificationsFormat.getSelectedIndex() == 0); _notificationQueue = Config.useOsxNotifications() ? new OsxNotificationQueue() : new DialogNotificationQueue(); } try { Config.save(); debugLog.debug("...save complete"); JOptionPane.showMessageDialog(this, "Options Saved"); } catch (Throwable e) { Log.warn("Error occurred trying to write settings file, your settings may not be saved", e); JOptionPane.showMessageDialog(null, "Error occurred trying to write settings file, your settings may not be saved"); } } private void _setCurrentMatchEnabledi(Boolean enabled){ _currentMatchEnabled = enabled; _currentYourClassSelector.setEnabled(enabled); _currentOpponentClassSelect.setEnabled(enabled); _currentGameCoinField.setEnabled(enabled); _currentOpponentNameField.setEnabled(enabled); _currentNotesField.setEnabled(enabled); } //http://stackoverflow.com/questions/7461477/how-to-hide-a-jframe-in-system-tray-of-taskbar TrayIcon trayIcon; SystemTray tray; private void _enableMinimizeToTray(){ if(SystemTray.isSupported()){ tray = SystemTray.getSystemTray(); ActionListener exitListener = new ActionListener() { public void actionPerformed(ActionEvent e) { System.exit(0); } }; PopupMenu popup = new PopupMenu(); MenuItem defaultItem = new MenuItem("Restore"); defaultItem.setFont(new Font("Arial",Font.BOLD,14)); defaultItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { setVisible(true); setExtendedState(JFrame.NORMAL); } }); popup.add(defaultItem); defaultItem = new MenuItem("Exit"); defaultItem.addActionListener(exitListener); defaultItem.setFont(new Font("Arial",Font.PLAIN,14)); popup.add(defaultItem); Image icon = new ImageIcon(getClass().getResource("/images/icon.png")).getImage(); trayIcon = new TrayIcon(icon, "HearthStats.net Uploader", popup); trayIcon.setImageAutoSize(true); trayIcon.addMouseListener(new MouseAdapter(){ public void mousePressed(MouseEvent e){ if(e.getClickCount() >= 2){ setVisible(true); setExtendedState(JFrame.NORMAL); } } }); } else { debugLog.debug("system tray not 
supported"); } addWindowStateListener(new WindowStateListener() { public void windowStateChanged(WindowEvent e) { if (Config.minimizeToTray()) { if (e.getNewState() == ICONIFIED) { try { tray.add(trayIcon); setVisible(false); } catch (AWTException ex) { } } if (e.getNewState()==7) { try{ tray.add(trayIcon); setVisible(false); } catch(AWTException ex){ } } if (e.getNewState()==MAXIMIZED_BOTH) { tray.remove(trayIcon); setVisible(true); } if (e.getNewState()==NORMAL) { tray.remove(trayIcon); setVisible(true); debugLog.debug("Tray icon removed"); } } } }); } }
Added acknowledgement for jcrka
uploader/src/main/java/net/hearthstats/Monitor.java
Added acknowledgement for jcrka
Java
bsd-3-clause
78d8c62e07679f1fa3a264d2cf8d918ae929073b
0
asamgir/openspecimen,NCIP/catissue-core,krishagni/openspecimen,krishagni/openspecimen,NCIP/catissue-core,NCIP/catissue-core,krishagni/openspecimen,asamgir/openspecimen,asamgir/openspecimen
/** *<p>Title: </p> *<p>Description: </p> *<p>Copyright: (c) Washington University, School of Medicine 2004</p> *<p>Company: Washington University, School of Medicine, St. Louis.</p> *@author Aarti Sharma *@version 1.0 */ package edu.wustl.common.security; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Vector; import org.apache.commons.lang.StringUtils; import edu.wustl.catissuecore.domain.AbstractDomainObject; import edu.wustl.catissuecore.util.Permissions; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SecurityDataBean; import edu.wustl.common.security.exceptions.SMException; import edu.wustl.common.security.exceptions.SMTransactionException; import edu.wustl.common.util.logger.Logger; import gov.nih.nci.security.AuthenticationManager; import gov.nih.nci.security.AuthorizationManager; import gov.nih.nci.security.SecurityServiceProvider; import gov.nih.nci.security.UserProvisioningManager; import gov.nih.nci.security.authorization.domainobjects.Application; import gov.nih.nci.security.authorization.domainobjects.Group; import gov.nih.nci.security.authorization.domainobjects.Privilege; import gov.nih.nci.security.authorization.domainobjects.ProtectionElement; import gov.nih.nci.security.authorization.domainobjects.ProtectionElementPrivilegeContext; import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup; import gov.nih.nci.security.authorization.domainobjects.Role; import gov.nih.nci.security.authorization.domainobjects.User; import gov.nih.nci.security.dao.ApplicationSearchCriteria; import gov.nih.nci.security.dao.GroupSearchCriteria; import gov.nih.nci.security.dao.ProtectionGroupSearchCriteria; import gov.nih.nci.security.dao.RoleSearchCriteria; import gov.nih.nci.security.dao.SearchCriteria; import gov.nih.nci.security.dao.UserSearchCriteria; import gov.nih.nci.security.exceptions.CSException; import gov.nih.nci.security.exceptions.CSObjectNotFoundException; import gov.nih.nci.security.exceptions.CSTransactionException; /** *<p>Title: </p> *<p>Description: </p> *<p>Copyright: (c) Washington University, School of Medicine 2005</p> *<p>Company: Washington University, School of Medicine, St. 
Louis.</p> *@author Aarti Sharma *@version 1.0 */ public class SecurityManager implements Permissions { private static AuthenticationManager authenticationManager = null; private static AuthorizationManager authorizationManager = null; private Class requestingClass = null; private static final String CATISSUE_CORE_CONTEXT_NAME = "catissuecore"; private static final String ADMINISTRATOR_ROLE = "1"; private static final String SUPERVISOR_ROLE = "2"; private static final String TECHNICIAN_ROLE = "3"; private static final String PUBLIC_ROLE = "7"; private static final String ADMINISTRATOR_GROUP = "ADMINISTRATOR_GROUP"; private static final String SUPERVISOR_GROUP = "SUPERVISOR_GROUP"; private static final String TECHNICIAN_GROUP = "TECHNICIAN_GROUP"; private static final String PUBLIC_GROUP = "PUBLIC_GROUP"; private static final String ADMINISTRATOR_GROUP_ID = "1"; private static final String SUPERVISOR_GROUP_ID = "2"; private static final String TECHNICIAN_GROUP_ID = "3"; private static final String PUBLIC_GROUP_ID = "4"; /** * @param class1 */ public SecurityManager(Class class1) { requestingClass = class1; } /** * @param class1 * @return */ public static SecurityManager getInstance(Class class1) { return new SecurityManager(class1); } /** * Returns the AuthenticationManager for the caTISSUE Core. This method follows the * singleton pattern so that only one AuthenticationManager is created for * the caTISSUE Core. * * @return * @throws CSException */ protected AuthenticationManager getAuthenticationManager() throws CSException { if (authenticationManager == null) { synchronized (requestingClass) { if (authenticationManager == null) { authenticationManager = SecurityServiceProvider .getAuthenticationManager(CATISSUE_CORE_CONTEXT_NAME); } } } return authenticationManager; } /** * Returns the Authorization Manager for the caTISSUE Core. * This method follows the singleton pattern so that * only one AuthorizationManager is created. * * @return * @throws CSException */ protected AuthorizationManager getAuthorizationManager() throws CSException { if (authorizationManager == null) { synchronized (requestingClass) { if (authorizationManager == null) { authorizationManager = SecurityServiceProvider .getAuthorizationManager(CATISSUE_CORE_CONTEXT_NAME); } } } return authorizationManager; } /** * Returns the UserProvisioningManager singleton object. * * @return * @throws CSException */ protected UserProvisioningManager getUserProvisioningManager() throws CSException { UserProvisioningManager userProvisioningManager = (UserProvisioningManager) getAuthorizationManager(); return userProvisioningManager; } public Application getApplication(String applicationName) throws CSException { Application application = new Application(); application.setApplicationName(applicationName); ApplicationSearchCriteria applicationSearchCriteria = new ApplicationSearchCriteria( application); application = (Application) getUserProvisioningManager().getObjects( applicationSearchCriteria).get(0); return application; } /** * Returns true or false depending on whether the person gets authenticated or not.
* @param loginName login name * @param password password * @return * @throws SMException */ public boolean login(String loginName, String password) throws SMException { boolean loginSuccess = false; try { Logger.out.debug("login name: " + loginName + " password: " + password); AuthenticationManager authMngr = getAuthenticationManager(); loginSuccess = authMngr.login(loginName, password); } catch (CSException ex) { Logger.out .debug("Authentication|" + requestingClass + "|" + loginName + "|login|Failure| Authentication is not successful for user " + loginName + "|" + ex.getMessage()); throw new SMException(ex.getMessage(), ex); } return loginSuccess; } /** * This method creates a new User in the database based on the data passed * @param user user to be created * @throws SMTransactionException If there is any exception in creating the User */ public void createUser(User user) throws SMTransactionException { try { getUserProvisioningManager().createUser(user); } catch (CSTransactionException e) { Logger.out.debug("Unable to create user: Exception: " + e.getMessage()); throw new SMTransactionException(e.getMessage(), e); } catch (CSException e) { Logger.out.debug("Unable to create user: Exception: " + e); } } /** * This method returns the User object from the database for the passed User's Login Name. * If no User is found then null is returned * @param loginName Login name of the user * @return * @throws SMException */ public User getUser(String loginName) throws SMException { try { return getAuthorizationManager().getUser(loginName); } catch (CSException e) { Logger.out .debug("Unable to get user: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * This method checks whether a user exists in the database or not * @param loginName Login name of the user * @return TRUE is returned if a user exists else FALSE is returned * @throws SMException */ public boolean userExists(String loginName) throws SMException { boolean userExists = true; try { if (getUser(loginName) == null) { userExists = false; } } catch (SMException e) { Logger.out .debug("Unable to get user: Exception: " + e.getMessage()); throw e; } return userExists; } /** * This method returns a Vector of all the role objects defined for the application from the database * @return * @throws SMException */ public Vector getRoles() throws SMException { Vector roles = new Vector(); UserProvisioningManager userProvisioningManager = null; try { userProvisioningManager = getUserProvisioningManager(); roles.add(userProvisioningManager.getRoleById(ADMINISTRATOR_ROLE)); roles.add(userProvisioningManager.getRoleById(SUPERVISOR_ROLE)); roles.add(userProvisioningManager.getRoleById(TECHNICIAN_ROLE)); roles.add(userProvisioningManager.getRoleById(PUBLIC_ROLE)); } catch (CSException e) { Logger.out.debug("Unable to get roles: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } return roles; } /** * Assigns a Role to a User * @param userName - the User Name to whom the Role will be assigned * @param roleID - The id of the Role which is to be assigned to the user * @throws SMException */ public void assignRoleToUser(String userName, String roleID) throws SMException { Logger.out.debug("UserName: " + userName + " Role ID:" + roleID); UserProvisioningManager userProvisioningManager = null; User user; try { userProvisioningManager = getUserProvisioningManager(); user = userProvisioningManager.getUser(userName); // Remove user from any other role that may already be assigned
userProvisioningManager.removeUserFromGroup(ADMINISTRATOR_ROLE, String.valueOf(user.getUserId())); userProvisioningManager.removeUserFromGroup(SUPERVISOR_ROLE, String .valueOf(user.getUserId())); userProvisioningManager.removeUserFromGroup(TECHNICIAN_ROLE, String .valueOf(user.getUserId())); userProvisioningManager.removeUserFromGroup(PUBLIC_GROUP_ID, String .valueOf(user.getUserId())); if (roleID.equals(ADMINISTRATOR_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()), new String[]{ADMINISTRATOR_GROUP_ID}); Logger.out.debug(" User assigned Administrator role"); } else if (roleID.equals(SUPERVISOR_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()), new String[]{SUPERVISOR_GROUP_ID}); Logger.out.debug(" User assigned supervisor role"); } else if (roleID.equals(TECHNICIAN_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()), new String[]{TECHNICIAN_GROUP_ID}); Logger.out.debug(" User assigned technician role"); } else if (roleID.equals(PUBLIC_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()), new String[]{PUBLIC_GROUP_ID}); Logger.out.debug(" User assigned public role"); } else { Logger.out.debug(" User assigned no role"); } } catch (CSException e) { Logger.out.debug("UNABLE TO ASSIGN ROLE TO USER: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } public Role getUserRole(long userID) throws SMException { Set groups; UserProvisioningManager userProvisioningManager = null; Iterator it; Group group; Role role = null; try { userProvisioningManager = getUserProvisioningManager(); groups = userProvisioningManager.getGroups(String.valueOf(userID)); it = groups.iterator(); while (it.hasNext()) { group = (Group) it.next(); if (group.getGroupName().equals(ADMINISTRATOR_GROUP)) { role = userProvisioningManager .getRoleById(ADMINISTRATOR_ROLE); return role; } else if (group.getGroupName().equals(SUPERVISOR_GROUP)) { role = userProvisioningManager.getRoleById(SUPERVISOR_ROLE); return role; } else if (group.getGroupName().equals(TECHNICIAN_GROUP)) { role = userProvisioningManager.getRoleById(TECHNICIAN_ROLE); return role; } else if (group.getGroupName().equals(PUBLIC_GROUP)) { role = userProvisioningManager.getRoleById(PUBLIC_ROLE); return role; } } } catch (CSException e) { Logger.out.debug("Unable to get roles: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } return role; } /** * Modifies an entry for an existing User in the database based on the data passed * @param user - the User object that needs to be modified in the database * @throws SMException if there is any exception in modifying the User in the database */ public void modifyUser(User user) throws SMException { try { getUserProvisioningManager().modifyUser(user); } catch (CSException e) { Logger.out.debug("Unable to modify user: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * Returns the User object for the passed User id * @param userId - The id of the User object which is to be obtained * @return The User object from the database for the passed User id * @throws SMException if the User object is not found for the given id */ public User getUserById(String userId) throws SMException { Logger.out.debug("user Id: " + userId); try { User user = getUserProvisioningManager().getUserById(userId); Logger.out.debug("User returned: " + user.getLoginName()); return user; } catch (CSException e) { Logger.out.debug("Unable to get user by Id: Exception: " + e.getMessage()); throw new 
SMException(e.getMessage(), e); } } /** * Returns list of the User objects for the passed email address * @param emailAddress - Email Address for which users need to be searched * @return * @throws SMException if there is any exception while querying the database */ public List getUsersByEmail(String emailAddress) throws SMException { try { User user = new User(); user.setEmailId(emailAddress); SearchCriteria searchCriteria = new UserSearchCriteria(user); return getUserProvisioningManager().getObjects(searchCriteria); } catch (CSException e) { Logger.out.debug("Unable to get users by emailAddress: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * @throws SMException * */ public List getUsers() throws SMException { try { User user = new User(); SearchCriteria searchCriteria = new UserSearchCriteria(user); return getUserProvisioningManager().getObjects(searchCriteria); } catch (CSException e) { Logger.out.debug("Unable to get all users: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * Checks whether the user has EXECUTE privilege on the Action subclass of * SecureAction. * * @param loginName * @param objectId * @return * @throws Exception */ public boolean isAuthorizedToExecuteAction(String loginName, String objectId) throws Exception { Logger.out.debug("Login Name: " + loginName); User user = getUser(loginName); // String objectId = getObjectIdForSecureMethodAccess(); Logger.out.debug("The User name is: " + user.getName()); Logger.out.debug("The Object ID is: " + objectId); boolean isAuthorized = false; try { isAuthorized = getAuthorizationManager().checkPermission( user.getName(), objectId, EXECUTE); } catch (CSException ex) { Logger.out.fatal("The Security Service encountered " + "a fatal exception.", ex); throw new Exception( "The Security Service encountered a fatal exception.", ex); } return isAuthorized; } // /** // * Returns the object id of the protection element that represents // * the Action that is being requested for invocation.
// * @param clazz // * @return // */ // private String getObjectIdForSecureMethodAccess() // { // return requestingClass.getName(); // } /** * Returns list of objects corresponding to the searchCriteria passed * @param searchCriteria * @return List of resultant objects * @throws SMException if searchCriteria passed is null or if search results in no results * @throws CSException */ private List getObjects(SearchCriteria searchCriteria) throws SMException, CSException { if (null == searchCriteria) { Logger.out.debug(" Null Parameters passed"); throw new SMException("Null Parameters passed"); } UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); List list = userProvisioningManager.getObjects(searchCriteria); if (null == list || list.size() <= 0) { Logger.out.debug("Search resulted in no results"); throw new SMException("Search resulted in no results"); } return list; } public void assignAdditionalGroupsToUser(String userId, String[] groupIds) throws SMException { if (userId == null || groupIds == null || groupIds.length < 1) { Logger.out.debug(" Null or insufficient Parameters passed"); throw new SMException("Null or insufficient Parameters passed"); } Logger.out.debug(" userId: " + userId + " groupIds:" + groupIds); Set consolidatedGroupIds = new HashSet(); Set consolidatedGroups; String[] finalUserGroupIds; UserProvisioningManager userProvisioningManager; User user; UserSearchCriteria userSearchCriteria; Group group = new Group(); GroupSearchCriteria groupSearchCriteria; List list; try { userProvisioningManager = getUserProvisioningManager(); // user = new User(); // user.setUserId(userId); // userSearchCriteria = new UserSearchCriteria(user); // list = getObjects(userSearchCriteria); // user = (User)(list.get(0)); // if(user == null ) // { // Logger.out.debug("User with user ID "+userId+" not found"); // throw new SMException("User with user ID "+userId+" not found"); // } consolidatedGroups = userProvisioningManager.getGroups(userId); if (null != consolidatedGroups) { Iterator it = consolidatedGroups.iterator(); while (it.hasNext()) { group = (Group) it.next(); consolidatedGroupIds .add(String.valueOf(group.getGroupId())); } } /** * Consolidating all the Groups */ for (int i = 0; i < groupIds.length; i++) { consolidatedGroupIds.add(groupIds[i]); } finalUserGroupIds = new String[consolidatedGroupIds.size()]; Iterator it = consolidatedGroupIds.iterator(); for (int i = 0; it.hasNext(); i++) { finalUserGroupIds[i] = (String) it.next(); Logger.out.debug("Group user is assigned to: " + finalUserGroupIds[i]); } /** * Setting groups for user and updating it */ userProvisioningManager.assignGroupsToUser(userId, finalUserGroupIds); } catch (CSException ex) { Logger.out.fatal("The Security Service encountered " + "a fatal exception.", ex); throw new SMException( "The Security Service encountered a fatal exception.", ex); } } /** * This method creates protection elements corresponding to protection objects * passed and associates them with static as well as dynamic protection groups * that are passed. 
It also creates user group, role, protection group mapping for * all the elements in authorization data * @param authorizationData Vector of SecurityDataBean objects * @param protectionObjects Set of AbstractDomainObject instances * @param dynamicGroups Array of dynamic group names * @throws SMException */ public void insertAuthorizationData(Vector authorizationData, Set protectionObjects, String[] dynamicGroups) throws SMException { Set protectionElements; Iterator it; try { Logger.out .debug("************** Inserting authorization Data ***************"); /** * Create protection elements corresponding to all protection objects */ protectionElements = createProtectionElementsFromProtectionObjects(protectionObjects); /** * Create user group role protection group and their mappings if required */ createUserGroupRoleProtectionGroup(authorizationData, protectionElements); /** * Assigning protection elements to dynamic groups */ assignProtectionElementsToGroups(protectionElements, dynamicGroups); Logger.out .debug("************** Inserted authorization Data ***************"); } catch (CSException e) { Logger.out.fatal("The Security Service encountered " + "a fatal exception.", e); throw new SMException( "The Security Service encountered a fatal exception.", e); } } /** * This method assigns Protection Elements passed to the Protection group names passed. * @param protectionElements * @param groups * @throws CSException */ private void assignProtectionElementsToGroups(Set protectionElements, String[] groups) { ProtectionElement protectionElement; Iterator it; if (groups != null) { for (int i = 0; i < groups.length; i++) { for (it = protectionElements.iterator(); it.hasNext();) { protectionElement = (ProtectionElement) it.next(); assignProtectionElementToGroup(protectionElement, groups[i]); } } } } /** * This method creates user group, role, protection group mappings in database for the * passed authorizationData. It also adds protection elements to the protection groups * for which mapping is made. * For each element in authorization Data passed: * User group is created and users are added to user group if * one does not exist by the name passed. Similarly Protection Group is created * and protection elements are added to it if one does not exist. * Finally user group and protection group are associated with each other * by the role they need to be associated with. 
* If no role exists by the name an exception is thrown and the corresponding mapping is not created * @param authorizationData * @param protectionElements * @throws CSException * @throws SMException */ private void createUserGroupRoleProtectionGroup(Vector authorizationData, Set protectionElements) throws CSException, SMException { ProtectionElement protectionElement; ProtectionGroup protectionGroup = null; SecurityDataBean userGroupRoleProtectionGroupBean; RoleSearchCriteria roleSearchCriteria; Role role; String[] roleIds = null; List list; ProtectionGroupSearchCriteria protectionGroupSearchCriteria; Group group = new Group(); GroupSearchCriteria groupSearchCriteria; Set userGroup; User user; Iterator it; UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); groupSearchCriteria = new GroupSearchCriteria(group); if (authorizationData != null) { Logger.out.debug(" UserGroupRoleProtectionGroup Size:" + authorizationData.size()); for (int i = 0; i < authorizationData.size(); i++) { Logger.out.debug(" authorizationData:" + i + " " + authorizationData.get(i).toString()); try { userGroupRoleProtectionGroupBean = (SecurityDataBean) authorizationData .get(i); group .setApplication(getApplication(CATISSUE_CORE_CONTEXT_NAME)); group.setGroupName(userGroupRoleProtectionGroupBean .getGroupName()); groupSearchCriteria = new GroupSearchCriteria(group); /** * If group already exists */ try { list = getObjects(groupSearchCriteria); Logger.out.debug("User group " + group.getGroupName() + " already exists"); } /** * If group does not exist already */ catch (SMException ex) { Logger.out.debug("User group " + group.getGroupName() + " does not exist"); // group.setUsers(userGroupRoleProtectionGroupBean.getGroup()); userProvisioningManager.createGroup(group); Logger.out.debug("User group " + group.getGroupName() + " created"); Logger.out.debug("Users added to group : " + group.getUsers()); list = getObjects(groupSearchCriteria); } group = (Group) list.get(0); /** * Assigning group to users in userGroup */ userGroup = userGroupRoleProtectionGroupBean.getGroup(); for (it = userGroup.iterator(); it.hasNext();) { user = (User) it.next(); // userProvisioningManager.assignGroupsToUser(String.valueOf(user.getUserId()),new String[] {String.valueOf(group.getGroupId())}); assignAdditionalGroupsToUser(String.valueOf(user .getUserId()), new String[]{String .valueOf(group.getGroupId())}); Logger.out.debug("userId:" + user.getUserId() + " group Id:" + group.getGroupId()); } protectionGroup = new ProtectionGroup(); protectionGroup .setApplication(getApplication(CATISSUE_CORE_CONTEXT_NAME)); protectionGroup .setProtectionGroupName(userGroupRoleProtectionGroupBean .getProtectionGroupName()); protectionGroupSearchCriteria = new ProtectionGroupSearchCriteria( protectionGroup); /** * If Protection group already exists add protection elements to the group */ try { list = getObjects(protectionGroupSearchCriteria); protectionGroup = (ProtectionGroup) list.get(0); Logger.out.debug(" From Database: " + protectionGroup.toString()); } /** * If the protection group does not already exist create the protection group * and add protection elements to it. 
*/ catch (SMException sme) { protectionGroup .setProtectionElements(protectionElements); userProvisioningManager .createProtectionGroup(protectionGroup); Logger.out.debug("Protection group created: " + protectionGroup.toString()); } role = new Role(); role .setName(userGroupRoleProtectionGroupBean .getRoleName()); roleSearchCriteria = new RoleSearchCriteria(role); list = getObjects(roleSearchCriteria); roleIds = new String[1]; roleIds[0] = String.valueOf(((Role) list.get(0)).getId()); userProvisioningManager.assignGroupRoleToProtectionGroup( String.valueOf(protectionGroup .getProtectionGroupId()), String .valueOf(group.getGroupId()), roleIds); Logger.out.debug("Assigned Group Role To Protection Group " + protectionGroup.getProtectionGroupId() + " " + String.valueOf(group.getGroupId()) + " " + roleIds); } catch (CSTransactionException ex) { Logger.out.error( "Error occurred assigning Group Role to Protection Group " + protectionGroup.getProtectionGroupId() + " " + String.valueOf(group.getGroupId()) + " " + roleIds, ex); } } } } /** * This method creates protection elements from the protection objects passed * and associates them with the respective static groups they should be added to * depending on their class name, * if the corresponding protection element does not already exist. * @param protectionObjects * @return * @throws CSException */ private Set createProtectionElementsFromProtectionObjects( Set protectionObjects) throws CSException { ProtectionElement protectionElement; Set protectionElements = new HashSet(); ProtectionGroup protectionGroup; List list; AbstractDomainObject protectionObject; String[] staticGroups; Set protectionGroups = null; ProtectionGroupSearchCriteria protectionGroupSearchCriteria; Iterator it; UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); if (protectionObjects != null) { for (it = protectionObjects.iterator(); it.hasNext();) { protectionElement = new ProtectionElement(); protectionObject = (AbstractDomainObject) it.next(); protectionElement.setObjectId(protectionObject.getClass() .getName() + "_" + protectionObject.getSystemIdentifier()); try { /** * In case protection element already exists */ try { protectionElement = userProvisioningManager .getProtectionElement(protectionElement .getObjectId()); Logger.out.debug(" Protection Element: " + protectionElement.getObjectId() + " already exists"); } /** * If protection element does not exist already */ catch (CSObjectNotFoundException csex) { protectionElement .setApplication(getApplication(CATISSUE_CORE_CONTEXT_NAME)); protectionElement .setProtectionElementDescription(protectionObject .getClass().getName() + " object"); protectionElement .setProtectionElementName(protectionObject .getClass().getName() + "_" + protectionObject .getSystemIdentifier()); /** * Adding protection elements to static groups they should be added to */ staticGroups = (String[]) Constants.STATIC_PROTECTION_GROUPS_FOR_OBJECT_TYPES .get(protectionObject.getClass().getName()); if (staticGroups != null) { protectionGroups = new HashSet(); for (int i = 0; i < staticGroups.length; i++) { Logger.out.debug(" group name " + i + " " + staticGroups[i]); protectionGroup = new ProtectionGroup(); protectionGroup .setProtectionGroupName(staticGroups[i]); protectionGroupSearchCriteria = new ProtectionGroupSearchCriteria( protectionGroup); try { list = getObjects(protectionGroupSearchCriteria); protectionGroup = (ProtectionGroup) list .get(0); Logger.out.debug(" From Database: " + protectionGroup.toString());
protectionGroups.add(protectionGroup); } catch (SMException sme) { Logger.out.error( "Error occurred while retrieving " + staticGroups[i] + " From Database: ", sme); } } protectionElement .setProtectionGroups(protectionGroups); } userProvisioningManager .createProtectionElement(protectionElement); Logger.out.debug("Protection element created: " + protectionElement.toString()); Logger.out .debug("Protection element added to groups : " + protectionGroups); } protectionElements.add(protectionElement); } catch (CSTransactionException ex) { Logger.out.error( "Error occurred while creating Protection Element " + protectionElement .getProtectionElementName(), ex); } } } return protectionElements; } /** * Assigns the passed protection element to the named protection group. * @param protectionElement * @param groupName */ private void assignProtectionElementToGroup( ProtectionElement protectionElement, String groupName) { try { UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); userProvisioningManager.assignProtectionElement(groupName, protectionElement.getObjectId()); Logger.out.debug("Associated protection group: " + groupName + " to protection element " + protectionElement.getProtectionElementName()); } catch (CSException e) { Logger.out .error( "The Security Service encountered an error while associating protection group: " + groupName + " to protection element " + protectionElement .getProtectionElementName(), e); } } public boolean isAuthorized(String userName, String objectId, String privilegeName) throws SMException { try { boolean isAuthorized = getAuthorizationManager().checkPermission( userName, objectId, privilegeName); Logger.out.debug(" User:" + userName + " objectId:" + objectId + " privilegeName:" + privilegeName + " isAuthorized:" + isAuthorized); return isAuthorized; } catch (CSException e) { Logger.out.debug("Unable to check permission: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } public boolean isAuthorized(String userName, String objectId, String attributeName, String privilegeName) throws SMException { try { return getAuthorizationManager().checkPermission(userName, objectId, attributeName, privilegeName); } catch (CSException e) { Logger.out.debug("Unable to check permission: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * This method returns the name of the Protection group which consists of obj as Protection Element and * whose name consists of string nameConsistingOf * @param obj * @param nameConsistingOf * @return * @throws SMException */ public String getProtectionGroupByName(AbstractDomainObject obj, String nameConsistingOf) throws SMException { Set protectionGroups; Iterator it; ProtectionGroup protectionGroup; ProtectionElement protectionElement; String name = null; String protectionElementName = obj.getClass().getName() + "_" + obj.getSystemIdentifier(); try { protectionElement = getAuthorizationManager().getProtectionElement( protectionElementName); protectionGroups = getAuthorizationManager().getProtectionGroups( protectionElement.getProtectionElementId().toString()); it = protectionGroups.iterator(); while (it.hasNext()) { protectionGroup = (ProtectionGroup) it.next(); name = protectionGroup.getProtectionGroupName(); if (name.indexOf(nameConsistingOf) != -1) { Logger.out.debug("protection group by name " + nameConsistingOf + " for Protection Element " + protectionElementName + " is " + name); return name; } } } catch (CSException e) { Logger.out.debug("Unable to get protection group
by name " + nameConsistingOf + " for Protection Element " + protectionElementName + e.getMessage()); throw new SMException(e.getMessage(), e); } return name; } /** * Returns name value beans corresponding to all * privileges that can be assigned for Assign Privileges Page * @param userName login name of user logged in * @return */ public Vector getPrivilegesForAssignPrivilege(String userName) { Vector privileges = new Vector(); NameValueBean nameValueBean; nameValueBean = new NameValueBean(Permissions.READ, Permissions.READ); privileges.add(nameValueBean); nameValueBean = new NameValueBean(Permissions.UPDATE, Permissions.UPDATE); privileges.add(nameValueBean); nameValueBean = new NameValueBean(Permissions.CREATE, Permissions.CREATE); privileges.add(nameValueBean); nameValueBean = new NameValueBean(Permissions.USE, Permissions.USE); privileges.add(nameValueBean); return privileges; } /** * This method returns NameValueBeans for all the objects of type objectType * on which user with identifier userID has privilege ASSIGN_<<privilegeName>>. * @param userID * @param objectType * @param privilegeName * @return * @throws SMException thrown if any error occurs while retrieving ProtectionElementPrivilegeContextForUser */ public Vector getObjectsForAssignPrivilege(String userID, String objectType, String privilegeName) throws SMException { Vector objects = new Vector(); NameValueBean nameValueBean; UserProvisioningManager userProvisioningManager; ProtectionElementPrivilegeContext protectionElementPrivilegeContext; Set protectionElementPrivilegeContextSet; Set privileges; Iterator iterator; String objectId; Privilege privilege; try { userProvisioningManager = getUserProvisioningManager(); protectionElementPrivilegeContextSet = userProvisioningManager .getProtectionElementPrivilegeContextForUser(userID); if (protectionElementPrivilegeContextSet != null) { iterator = protectionElementPrivilegeContextSet.iterator(); while (iterator.hasNext()) { protectionElementPrivilegeContext = (ProtectionElementPrivilegeContext) iterator .next(); objectId = protectionElementPrivilegeContext .getProtectionElement().getObjectId(); Logger.out.debug(objectId); if (objectId.indexOf(objectType + "_") != -1) { privileges = protectionElementPrivilegeContext .getPrivileges(); Iterator it = privileges.iterator(); while (it.hasNext()) { privilege = (Privilege) it.next(); Logger.out.debug(objectId + "*************" + privilege.getName()); if (privilege.getName().equals( "ASSIGN_" + privilegeName)) { nameValueBean = new NameValueBean(objectId, objectId); objects.add(nameValueBean); Logger.out.debug(nameValueBean); break; } } } } } } catch (CSException e) { Logger.out.debug("Unable to get objects: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } return objects; } }
WEB-INF/src/edu/wustl/common/security/SecurityManager.java
/** *<p>Title: </p> *<p>Description: </p> *<p>Copyright: (c) Washington University, School of Medicine 2004</p> *<p>Company: Washington University, School of Medicine, St. Louis.</p> *@author Aarti Sharma *@version 1.0 */ package edu.wustl.common.security; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Vector; import org.apache.commons.lang.StringUtils; import edu.wustl.catissuecore.domain.AbstractDomainObject; import edu.wustl.catissuecore.util.Permissions; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.beans.SecurityDataBean; import edu.wustl.common.security.exceptions.SMException; import edu.wustl.common.security.exceptions.SMTransactionException; import edu.wustl.common.util.logger.Logger; import gov.nih.nci.security.AuthenticationManager; import gov.nih.nci.security.AuthorizationManager; import gov.nih.nci.security.SecurityServiceProvider; import gov.nih.nci.security.UserProvisioningManager; import gov.nih.nci.security.authorization.domainobjects.Application; import gov.nih.nci.security.authorization.domainobjects.Group; import gov.nih.nci.security.authorization.domainobjects.ProtectionElement; import gov.nih.nci.security.authorization.domainobjects.ProtectionElementPrivilegeContext; import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup; import gov.nih.nci.security.authorization.domainobjects.Role; import gov.nih.nci.security.authorization.domainobjects.User; import gov.nih.nci.security.dao.ApplicationSearchCriteria; import gov.nih.nci.security.dao.GroupSearchCriteria; import gov.nih.nci.security.dao.ProtectionGroupSearchCriteria; import gov.nih.nci.security.dao.RoleSearchCriteria; import gov.nih.nci.security.dao.SearchCriteria; import gov.nih.nci.security.dao.UserSearchCriteria; import gov.nih.nci.security.exceptions.CSException; import gov.nih.nci.security.exceptions.CSObjectNotFoundException; import gov.nih.nci.security.exceptions.CSTransactionException; /** *<p>Title: </p> *<p>Description: </p> *<p>Copyright: (c) Washington University, School of Medicine 2005</p> *<p>Company: Washington University, School of Medicine, St. Louis.</p> *@author Aarti Sharma *@version 1.0 */ public class SecurityManager implements Permissions { private static AuthenticationManager authenticationManager = null; private static AuthorizationManager authorizationManager = null; private Class requestingClass = null; private static final String CATISSUE_CORE_CONTEXT_NAME = "catissuecore"; private static final String ADMINISTRATOR_ROLE = "1"; private static final String SUPERVISOR_ROLE = "2"; private static final String TECHNICIAN_ROLE = "3"; private static final String PUBLIC_ROLE = "7"; private static final String ADMINISTRATOR_GROUP = "ADMINISTRATOR_GROUP"; private static final String SUPERVISOR_GROUP = "SUPERVISOR_GROUP"; private static final String TECHNICIAN_GROUP = "TECHNICIAN_GROUP"; private static final String PUBLIC_GROUP = "PUBLIC_GROUP"; private static final String ADMINISTRATOR_GROUP_ID = "1"; private static final String SUPERVISOR_GROUP_ID = "2"; private static final String TECHNICIAN_GROUP_ID = "3"; private static final String PUBLIC_GROUP_ID = "4"; /** * @param class1 */ public SecurityManager(Class class1) { requestingClass = class1; } /** * @param class1 * @return */ public static SecurityManager getInstance(Class class1) { return new SecurityManager(class1); } /** * Returns the AuthenticationManager for the caTISSUE Core. 
This method follows the * singleton pattern so that only one AuthenticationManager is created for * the caTISSUE Core. * * @return * @throws CSException */ protected AuthenticationManager getAuthenticationManager() throws CSException { if (authenticationManager == null) { synchronized (requestingClass) { if (authenticationManager == null) { authenticationManager = SecurityServiceProvider .getAuthenticationManager(CATISSUE_CORE_CONTEXT_NAME); } } } return authenticationManager; } /** * Returns the Authorization Manager for the caTISSUE Core. * This method follows the singleton pattern so that * only one AuthorizationManager is created. * * @return * @throws CSException */ protected AuthorizationManager getAuthorizationManager() throws CSException { if (authorizationManager == null) { synchronized (requestingClass) { if (authorizationManager == null) { authorizationManager = SecurityServiceProvider .getAuthorizationManager(CATISSUE_CORE_CONTEXT_NAME); } } } return authorizationManager; } /** * Returns the UserProvisioningManager singleton object. * * @return * @throws CSException */ protected UserProvisioningManager getUserProvisioningManager() throws CSException { UserProvisioningManager userProvisioningManager = (UserProvisioningManager) getAuthorizationManager(); return userProvisioningManager; } public Application getApplication(String applicationName) throws CSException { Application application = new Application(); application.setApplicationName(applicationName); ApplicationSearchCriteria applicationSearchCriteria = new ApplicationSearchCriteria( application); application = (Application) getUserProvisioningManager().getObjects( applicationSearchCriteria).get(0); return application; } /** * Returns true or false depending on the person gets authenticated or not. * @param requestingClass * @param loginName login name * @param password password * @return * @throws CSException */ public boolean login(String loginName, String password) throws SMException { boolean loginSuccess = false; try { Logger.out.debug("login name: " + loginName + " passowrd: " + password); AuthenticationManager authMngr = getAuthenticationManager(); loginSuccess = authMngr.login(loginName, password); } catch (CSException ex) { Logger.out .debug("Authentication|" + requestingClass + "|" + loginName + "|login|Success| Authentication is not successful for user " + loginName + "|" + ex.getMessage()); throw new SMException(ex.getMessage(), ex); } return loginSuccess; } /** * This method creates a new User in the database based on the data passed * @param user user to be created * @throws SMTransactionException If there is any exception in creating the User */ public void createUser(User user) throws SMTransactionException { try { getUserProvisioningManager().createUser(user); } catch (CSTransactionException e) { Logger.out.debug("Unable to create user: Exception: " + e.getMessage()); throw new SMTransactionException(e.getMessage(), e); } catch (CSException e) { Logger.out.debug("Unable to create user: Exception: " + e); } } /** * This method returns the User object from the database for the passed User's Login Name. 
* If no User is found then null is returned * @param loginName Login name of the user * @return * @throws SMException */ public User getUser(String loginName) throws SMException { try { return getAuthorizationManager().getUser(loginName); } catch (CSException e) { Logger.out .debug("Unable to get user: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * This method checks whether a user exists in the database or not * @param loginName Login name of the user * @return TRUE is returned if a user exists else FALSE is returned * @throws SMException */ public boolean userExists(String loginName) throws SMException { boolean userExists = true; try { if (getUser(loginName) == null) { userExists = false; } } catch (SMException e) { Logger.out .debug("Unable to get user: Exception: " + e.getMessage()); throw e; } return userExists; } /** * This method returns Vactor of all the role objects defined for the application from the database * @return * @throws SMException */ public Vector getRoles() throws SMException { Vector roles = new Vector(); UserProvisioningManager userProvisioningManager = null; try { userProvisioningManager = getUserProvisioningManager(); roles.add(userProvisioningManager.getRoleById(ADMINISTRATOR_ROLE)); roles.add(userProvisioningManager.getRoleById(SUPERVISOR_ROLE)); roles.add(userProvisioningManager.getRoleById(TECHNICIAN_ROLE)); roles.add(userProvisioningManager.getRoleById(PUBLIC_ROLE)); } catch (CSException e) { Logger.out.debug("Unable to get roles: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } return roles; } /** * Assigns a Role to a User * @param userName - the User Name to to whom the Role will be assigned * @param roleID - The id of the Role which is to be assigned to the user * @throws SMException */ public void assignRoleToUser(String userName, String roleID) throws SMException { Logger.out.debug("UserName: "+userName+" Role ID:"+roleID); UserProvisioningManager userProvisioningManager = null; User user; try { userProvisioningManager = getUserProvisioningManager(); user = userProvisioningManager.getUser(userName); //Remove user from any other role if he is assigned some userProvisioningManager.removeUserFromGroup(ADMINISTRATOR_ROLE,String.valueOf(user.getUserId())); userProvisioningManager.removeUserFromGroup(SUPERVISOR_ROLE,String.valueOf(user.getUserId())); userProvisioningManager.removeUserFromGroup(TECHNICIAN_ROLE,String.valueOf(user.getUserId())); userProvisioningManager.removeUserFromGroup(PUBLIC_GROUP_ID,String.valueOf(user.getUserId())); if (roleID.equals(ADMINISTRATOR_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()),new String[] { ADMINISTRATOR_GROUP_ID }); Logger.out.debug(" User assigned Administrator role"); } else if (roleID.equals(SUPERVISOR_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()),new String[] { SUPERVISOR_GROUP_ID }); Logger.out.debug(" User assigned supervisor role"); } else if (roleID.equals(TECHNICIAN_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()),new String[] { TECHNICIAN_GROUP_ID }); Logger.out.debug(" User assigned technician role"); } else if (roleID.equals(PUBLIC_ROLE)) { assignAdditionalGroupsToUser(String.valueOf(user.getUserId()),new String[] { PUBLIC_GROUP_ID }); Logger.out.debug(" User assigned public role"); } else { Logger.out.debug(" User assigned no role"); } } catch (CSException e) { Logger.out.debug("UNABLE TO ASSIGN ROLE TO USER: Exception: " + e.getMessage()); throw new 
SMException(e.getMessage(), e); } } public Role getUserRole(long userID) throws SMException { Set groups; UserProvisioningManager userProvisioningManager = null; Iterator it; Group group; Role role =null; try { userProvisioningManager = getUserProvisioningManager(); groups = userProvisioningManager.getGroups(String.valueOf(userID)); it = groups.iterator(); while(it.hasNext()) { group = (Group) it.next(); if(group.getGroupName().equals(ADMINISTRATOR_GROUP)) { role = userProvisioningManager.getRoleById(ADMINISTRATOR_ROLE); return role; } else if(group.getGroupName().equals(SUPERVISOR_GROUP)) { role = userProvisioningManager.getRoleById(SUPERVISOR_ROLE); return role; } else if(group.getGroupName().equals(TECHNICIAN_GROUP)) { role = userProvisioningManager.getRoleById(TECHNICIAN_ROLE); return role; } else if(group.getGroupName().equals(PUBLIC_GROUP)) { role = userProvisioningManager.getRoleById(PUBLIC_ROLE); return role; } } } catch (CSException e) { Logger.out.debug("Unable to get roles: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } return role; } /** * Modifies an entry for an existing User in the database based on the data passed * @param user - the User object that needs to be modified in the database * @throws SMException if there is any exception in modifying the User in the database */ public void modifyUser(User user) throws SMException { try { getUserProvisioningManager().modifyUser(user); } catch (CSException e) { Logger.out.debug("Unable to modify user: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * Returns the User object for the passed User id * @param userId - The id of the User object which is to be obtained * @return The User object from the database for the passed User id * @throws SMException if the User object is not found for the given id */ public User getUserById(String userId) throws SMException { Logger.out.debug("user Id: " + userId); try { User user = getUserProvisioningManager().getUserById(userId); Logger.out.debug("User returned: " + user.getLoginName()); return user; } catch (CSException e) { Logger.out.debug("Unable to get user by Id: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * Returns list of the User objects for the passed email address * @param emailAddress - Email Address for which users need to be searched * @return * @throws SMException if there is any exception while querying the database */ public List getUsersByEmail(String emailAddress) throws SMException { try { User user = new User(); user.setEmailId(emailAddress); SearchCriteria searchCriteria = new UserSearchCriteria(user); return getUserProvisioningManager().getObjects(searchCriteria); } catch (CSException e) { Logger.out.debug("Unable to get users by emailAddress: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * @throws SMException * */ public List getUsers() throws SMException { try { User user = new User(); SearchCriteria searchCriteria = new UserSearchCriteria(user); return getUserProvisioningManager().getObjects(searchCriteria); } catch (CSException e) { Logger.out.debug("Unable to get all users: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * Checks wether the user has EXECUTE privilege on the Action subclass of * SecureAction. 
* * @param string * @return * @throws CSException */ public boolean isAuthorizedToExecuteAction(String loginName, String objectId) throws Exception { Logger.out.debug("Login Name: " + loginName); User user = getUser(loginName); // String objectId = getObjectIdForSecureMethodAccess(); Logger.out.debug("The User name is: " + user.getName()); Logger.out.debug("The Object ID is: " + objectId); boolean isAuthorized = false; try { isAuthorized = getAuthorizationManager().checkPermission( user.getName(), objectId, EXECUTE); } catch (CSException ex) { Logger.out.fatal("The Security Service encountered " + "a fatal exception.", ex); throw new Exception( "The Security Service encountered a fatal exception.", ex); } return isAuthorized; } // /** // * Returns the object id of the protection element that represents // * the Action that is being requested for invocation. // * @param clazz // * @return // */ // private String getObjectIdForSecureMethodAccess() // { // return requestingClass.getName(); // } /** * Returns list of objects corresponding to the searchCriteria passed * @param searchCriteria * @return List of resultant objects * @throws SMException if searchCriteria passed is null or if search results in no results * @throws CSException */ private List getObjects(SearchCriteria searchCriteria) throws SMException, CSException { if (null == searchCriteria) { Logger.out.debug(" Null Parameters passed"); throw new SMException("Null Parameters passed"); } UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); List list = userProvisioningManager.getObjects(searchCriteria); if (null == list || list.size() <= 0) { Logger.out.debug("Search resulted in no results"); throw new SMException("Search resulted in no results"); } return list; } public void assignAdditionalGroupsToUser(String userId, String[] groupIds) throws SMException { if (userId == null || groupIds == null || groupIds.length < 1) { Logger.out.debug(" Null or insufficient Parameters passed"); throw new SMException("Null or insufficient Parameters passed"); } Logger.out.debug(" userId: " + userId + " groupIds:" + groupIds); Set consolidatedGroupIds = new HashSet(); Set consolidatedGroups; String[] finalUserGroupIds; UserProvisioningManager userProvisioningManager; User user; UserSearchCriteria userSearchCriteria; Group group = new Group(); GroupSearchCriteria groupSearchCriteria; List list; try { userProvisioningManager = getUserProvisioningManager(); // user = new User(); // user.setUserId(userId); // userSearchCriteria = new UserSearchCriteria(user); // list = getObjects(userSearchCriteria); // user = (User)(list.get(0)); // if(user == null ) // { // Logger.out.debug("User with user ID "+userId+" not found"); // throw new SMException("User with user ID "+userId+" not found"); // } consolidatedGroups = userProvisioningManager.getGroups(userId); if (null != consolidatedGroups) { Iterator it = consolidatedGroups.iterator(); while (it.hasNext()) { group = (Group) it.next(); consolidatedGroupIds .add(String.valueOf(group.getGroupId())); } } /** * Consolidating all the Groups */ for (int i = 0; i < groupIds.length; i++) { consolidatedGroupIds.add(groupIds[i]); } finalUserGroupIds = new String[consolidatedGroupIds.size()]; Iterator it = consolidatedGroupIds.iterator(); for (int i = 0; it.hasNext(); i++) { finalUserGroupIds[i] = (String) it.next(); Logger.out.debug("Group user is assigned to: " + finalUserGroupIds[i]); } /** * Setting groups for user and updating it */ userProvisioningManager.assignGroupsToUser(userId, 
finalUserGroupIds); } catch (CSException ex) { Logger.out.fatal("The Security Service encountered " + "a fatal exception.", ex); throw new SMException( "The Security Service encountered a fatal exception.", ex); } } /** * This method creates protection elements corresponding to protection objects * passed and associates them with static as well as dynamic protection groups * that are passed. It also creates user group, role, protection group mapping for * all the elements in authorization data * @param authorizationData Vector of SecurityDataBean objects * @param protectionObjects Set of AbstractDomainObject instances * @param dynamicGroups Array of dynamic group names * @throws SMException */ public void insertAuthorizationData(Vector authorizationData, Set protectionObjects, String[] dynamicGroups) throws SMException { Set protectionElements; Iterator it; try { Logger.out .debug("************** Inserting authorization Data ***************"); /** * Create protection elements corresponding to all protection objects */ protectionElements = createProtectionElementsFromProtectionObjects(protectionObjects); /** * Create user group role protection group and their mappings if required */ createUserGroupRoleProtectionGroup(authorizationData, protectionElements); /** * Assigning protection elements to dynamic groups */ assignProtectionElementsToGroups(protectionElements, dynamicGroups); Logger.out .debug("************** Inserted authorization Data ***************"); } catch (CSException e) { Logger.out.fatal("The Security Service encountered " + "a fatal exception.", e); throw new SMException( "The Security Service encountered a fatal exception.", e); } } /** * This method assigns Protection Elements passed to the Protection group names passed. * @param protectionElements * @param groups * @throws CSException */ private void assignProtectionElementsToGroups(Set protectionElements, String[] groups) { ProtectionElement protectionElement; Iterator it; if (groups != null) { for (int i = 0; i < groups.length; i++) { for (it = protectionElements.iterator(); it.hasNext();) { protectionElement = (ProtectionElement) it.next(); assignProtectionElementToGroup(protectionElement, groups[i]); } } } } /** * This method creates user group, role, protection group mappings in database for the * passed authorizationData. It also adds protection elements to the protection groups * for which mapping is made. * For each element in authorization Data passed: * User group is created and users are added to user group if * one does not exist by the name passed. Similarly Protection Group is created * and protection elements are added to it if one does not exist. * Finally user group and protection group are associated with each other * by the role they need to be associated with. 
* If no role exists by the name an exception is thrown and the corresponding mapping is not created * @param authorizationData * @param protectionElements * @throws CSException * @throws SMException */ private void createUserGroupRoleProtectionGroup(Vector authorizationData, Set protectionElements) throws CSException, SMException { ProtectionElement protectionElement; ProtectionGroup protectionGroup = null; SecurityDataBean userGroupRoleProtectionGroupBean; RoleSearchCriteria roleSearchCriteria; Role role; String[] roleIds = null; List list; ProtectionGroupSearchCriteria protectionGroupSearchCriteria; Group group = new Group(); GroupSearchCriteria groupSearchCriteria; Set userGroup; User user; Iterator it; UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); groupSearchCriteria = new GroupSearchCriteria(group); if (authorizationData != null) { Logger.out.debug(" UserGroupRoleProtectionGroup Size:"+ authorizationData.size()); for (int i = 0; i < authorizationData.size(); i++) { Logger.out.debug(" authorizationData:"+i+" "+ authorizationData.get(i).toString()); try { userGroupRoleProtectionGroupBean = (SecurityDataBean) authorizationData .get(i); group .setApplication(getApplication(CATISSUE_CORE_CONTEXT_NAME)); group.setGroupName(userGroupRoleProtectionGroupBean .getGroupName()); groupSearchCriteria = new GroupSearchCriteria(group); /** * If group already exists */ try { list = getObjects(groupSearchCriteria); Logger.out.debug("User group " + group.getGroupName() + " already exists"); } /** * If group does not exist already */ catch (SMException ex) { Logger.out.debug("User group " + group.getGroupName() + " does not exist"); // group.setUsers(userGroupRoleProtectionGroupBean.getGroup()); userProvisioningManager.createGroup(group); Logger.out.debug("User group " + group.getGroupName() + " created"); Logger.out.debug("Users added to group : " + group.getUsers()); list = getObjects(groupSearchCriteria); } group = (Group) list.get(0); /** * Assigning group to users in userGroup */ userGroup = userGroupRoleProtectionGroupBean.getGroup(); for (it = userGroup.iterator(); it.hasNext();) { user = (User) it.next(); // userProvisioningManager.assignGroupsToUser(String.valueOf(user.getUserId()),new String[] {String.valueOf(group.getGroupId())}); assignAdditionalGroupsToUser(String.valueOf(user .getUserId()), new String[]{String .valueOf(group.getGroupId())}); Logger.out.debug("userId:" + user.getUserId() + " group Id:" + group.getGroupId()); } protectionGroup = new ProtectionGroup(); protectionGroup .setApplication(getApplication(CATISSUE_CORE_CONTEXT_NAME)); protectionGroup .setProtectionGroupName(userGroupRoleProtectionGroupBean .getProtectionGroupName()); protectionGroupSearchCriteria = new ProtectionGroupSearchCriteria( protectionGroup); /** * If Protection group already exists add protection elements to the group */ try { list = getObjects(protectionGroupSearchCriteria); protectionGroup = (ProtectionGroup) list.get(0); Logger.out.debug(" From Database: " + protectionGroup.toString()); } /** * If the protection group does not already exist create the protection group * and add protection elements to it. 
*/ catch (SMException sme) { protectionGroup .setProtectionElements(protectionElements); userProvisioningManager .createProtectionGroup(protectionGroup); Logger.out.debug("Protection group created: " + protectionGroup.toString()); } role = new Role(); role .setName(userGroupRoleProtectionGroupBean .getRoleName()); roleSearchCriteria = new RoleSearchCriteria(role); list = getObjects(roleSearchCriteria); roleIds = new String[1]; roleIds[0] = String.valueOf(((Role) list.get(0)).getId()); userProvisioningManager.assignGroupRoleToProtectionGroup( String.valueOf(protectionGroup .getProtectionGroupId()), String .valueOf(group.getGroupId()), roleIds); Logger.out.debug("Assigned Group Role To Protection Group " + protectionGroup.getProtectionGroupId() + " " + String.valueOf(group.getGroupId()) + " " + roleIds); } catch (CSTransactionException ex) { Logger.out.error( "Error occured Assigned Group Role To Protection Group " + protectionGroup.getProtectionGroupId() + " " + String.valueOf(group.getGroupId()) + " " + roleIds, ex); } } } } /** * This method creates protection elements from the protection objects passed * and associate them with respective static groups they should be added to * depending on their class name * if the corresponding protection element does not already exist. * @param protectionObjects * @return * @throws CSException */ private Set createProtectionElementsFromProtectionObjects( Set protectionObjects) throws CSException { ProtectionElement protectionElement; Set protectionElements = new HashSet(); ProtectionGroup protectionGroup; List list; AbstractDomainObject protectionObject; String[] staticGroups; Set protectionGroups = null; ProtectionGroupSearchCriteria protectionGroupSearchCriteria; Iterator it; UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); if (protectionObjects != null) { for (it = protectionObjects.iterator(); it.hasNext();) { protectionElement = new ProtectionElement(); protectionObject = (AbstractDomainObject) it.next(); protectionElement.setObjectId(protectionObject.getClass() .getName() + "_" + protectionObject.getSystemIdentifier()); try { /** * In case protection element already exists */ try { protectionElement = userProvisioningManager .getProtectionElement(protectionElement .getObjectId()); Logger.out.debug(" Protection Element: " + protectionElement.getObjectId() + " already exists"); } /** * If protection element does not exist already */ catch (CSObjectNotFoundException csex) { protectionElement .setApplication(getApplication(CATISSUE_CORE_CONTEXT_NAME)); protectionElement .setProtectionElementDescription(protectionObject .getClass().getName() + " object"); protectionElement .setProtectionElementName(protectionObject .getClass().getName() + "_" + protectionObject .getSystemIdentifier()); /** * Adding protection elements to static groups they should be added to */ staticGroups = (String[]) Constants.STATIC_PROTECTION_GROUPS_FOR_OBJECT_TYPES .get(protectionObject.getClass().getName()); if (staticGroups != null) { protectionGroups = new HashSet(); for (int i = 0; i < staticGroups.length; i++) { Logger.out.debug(" group name " + i + " " + staticGroups[i]); protectionGroup = new ProtectionGroup(); protectionGroup .setProtectionGroupName(staticGroups[i]); protectionGroupSearchCriteria = new ProtectionGroupSearchCriteria( protectionGroup); try { list = getObjects(protectionGroupSearchCriteria); protectionGroup = (ProtectionGroup) list .get(0); Logger.out.debug(" From Database: " + protectionGroup.toString()); 
protectionGroups.add(protectionGroup); } catch (SMException sme) { Logger.out.error( "Error occured while retrieving " + staticGroups[i] + " From Database: ", sme); } } protectionElement .setProtectionGroups(protectionGroups); } userProvisioningManager .createProtectionElement(protectionElement); Logger.out.debug("Protection element created: " + protectionElement.toString()); Logger.out .debug("Protection element added to groups : " + protectionGroups); } protectionElements.add(protectionElement); } catch (CSTransactionException ex) { Logger.out.error( "Error occured while creating Potection Element " + protectionElement .getProtectionElementName(), ex); } } } return protectionElements; } /** * @param protectionElement * @param userProvisioningManager * @param dynamicGroups * @param i * @throws CSException */ private void assignProtectionElementToGroup( ProtectionElement protectionElement, String GroupsName) { try { UserProvisioningManager userProvisioningManager = getUserProvisioningManager(); userProvisioningManager.assignProtectionElement(GroupsName, protectionElement.getObjectId()); Logger.out.debug("Associated protection group: " + GroupsName + " to protectionElement" + protectionElement.getProtectionElementName()); } catch (CSException e) { Logger.out .error( "The Security Service encountered an error while associating protection group: " + GroupsName + " to protectionElement" + protectionElement .getProtectionElementName(), e); } } public boolean isAuthorized(String userName, String objectId, String privilegeName) throws SMException { try { boolean isAuthorized = getAuthorizationManager().checkPermission(userName, objectId, privilegeName); Logger.out.debug(" User:" + userName + " objectId:" + objectId + " privilegeName:" + privilegeName + " isAuthorized:" + isAuthorized); return isAuthorized; } catch (CSException e) { Logger.out.debug("Unable to get all users: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } public boolean isAuthorized(String userName, String objectId, String attributeName, String privilegeName) throws SMException { try { return getAuthorizationManager().checkPermission(userName, objectId, attributeName, privilegeName); } catch (CSException e) { Logger.out.debug("Unable to get all users: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } } /** * This method returns name of the Protection groupwhich consists of obj as Protection Element and * whose name consists of string nameConsistingOf * @param obj * @param nameConsistingOf * @return * @throws SMException */ public String getProtectionGroupByName(AbstractDomainObject obj, String nameConsistingOf) throws SMException { Set protectionGroups; Iterator it; ProtectionGroup protectionGroup; ProtectionElement protectionElement; String name =null; String protectionElementName = obj.getClass() .getName() + "_" + obj.getSystemIdentifier(); try { protectionElement = getAuthorizationManager().getProtectionElement(protectionElementName); protectionGroups = getAuthorizationManager().getProtectionGroups(protectionElement.getProtectionElementId().toString()); it = protectionGroups.iterator(); while(it.hasNext()) { protectionGroup = (ProtectionGroup) it.next(); name=protectionGroup.getProtectionGroupName(); if(name.indexOf(nameConsistingOf) !=-1) { Logger.out.debug("protection group by name "+nameConsistingOf+" for Protection Element "+protectionElementName+" is "+name ); return name; } } } catch (CSException e) { Logger.out.debug("Unable to get protection group by name 
"+nameConsistingOf+" for Protection Element "+protectionElementName + e.getMessage()); throw new SMException(e.getMessage(), e); } return name; } /** * Returns name value beans corresponding to all * privileges that can be assigned for Assign Privileges Page * @param userName login name of user logged in * @return */ public Vector getPrivilegesForAssignPrivilege(String userName) { Vector privileges = new Vector(); NameValueBean nameValueBean; nameValueBean = new NameValueBean(Permissions.READ,Permissions.READ); privileges.add(nameValueBean); nameValueBean = new NameValueBean(Permissions.UPDATE,Permissions.UPDATE); privileges.add(nameValueBean); nameValueBean = new NameValueBean(Permissions.CREATE,Permissions.CREATE); privileges.add(nameValueBean); nameValueBean = new NameValueBean(Permissions.USE,Permissions.USE); privileges.add(nameValueBean); return privileges; } /** * This method returns NameValueBeans for all the objects of type objectType * on which user with identifier userID has privilege ASSIGN_<<privilegeName>>. * @param userID * @param objectType * @param privilegeName * @return * @throws SMException thrown if any error occurs while retreiving ProtectionElementPrivilegeContextForUser */ public Vector getObjectsForAssignPrivilege(String userID, String objectType, String privilegeName) throws SMException { Vector objects = new Vector(); NameValueBean nameValueBean; UserProvisioningManager userProvisioningManager; ProtectionElementPrivilegeContext protectionElementPrivilegeContext; Set protectionElementPrivilegeContextSet; Iterator iterator; String objectId; try { userProvisioningManager= getUserProvisioningManager(); protectionElementPrivilegeContextSet = userProvisioningManager.getProtectionElementPrivilegeContextForUser(userID); if(protectionElementPrivilegeContextSet !=null) { iterator = protectionElementPrivilegeContextSet.iterator(); while(iterator.hasNext()) { protectionElementPrivilegeContext = (ProtectionElementPrivilegeContext) iterator.next(); objectId =protectionElementPrivilegeContext.getProtectionElement().getObjectId(); if(objectId.indexOf(objectType)!=-1) { if(protectionElementPrivilegeContext.getPrivileges().contains("ASSIGN_"+privilegeName)) { nameValueBean = new NameValueBean(objectId,objectId); objects.add(nameValueBean); } } } } } catch (CSException e) { Logger.out.debug("Unable to get objects: Exception: " + e.getMessage()); throw new SMException(e.getMessage(), e); } return objects; } }
changes in getObjectsForAssignPrivilege SVN-Revision: 1235
WEB-INF/src/edu/wustl/common/security/SecurityManager.java
changes in getObjectsForAssignPrivilege
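In assignRoleToUser in the SecurityManager file above, the removeUserFromGroup calls pass role constants (ADMINISTRATOR_ROLE, SUPERVISOR_ROLE, TECHNICIAN_ROLE) where a group ID appears to be expected; the two constant sets only happen to coincide for the first three values, and the final call already uses PUBLIC_GROUP_ID. A sketch of the presumably intended cleanup, using the group-ID constants throughout (this assumes removeUserFromGroup takes a group ID, as the PUBLIC_GROUP_ID call suggests):

    // Sketch: remove the user from every role group before reassignment,
    // consistently using group IDs rather than role IDs.
    String userId = String.valueOf(user.getUserId());
    userProvisioningManager.removeUserFromGroup(ADMINISTRATOR_GROUP_ID, userId);
    userProvisioningManager.removeUserFromGroup(SUPERVISOR_GROUP_ID, userId);
    userProvisioningManager.removeUserFromGroup(TECHNICIAN_GROUP_ID, userId);
    userProvisioningManager.removeUserFromGroup(PUBLIC_GROUP_ID, userId);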
Java
mit
cb87ef1f1b8b470dacfdcaff94aeb6a5681a3cb4
0
EasyPost/easypost-java
package com.easypost; public abstract class EasyPost { public static String API_BASE = "https://api.easypost.com/v2"; public static final String VERSION = "4.0.0"; public static String apiKey; public static String apiVersion; public static int readTimeout; }
src/main/java/com/easypost/EasyPost.java
package com.easypost; public abstract class EasyPost { public static String API_BASE = "https://api.easypost.com/v2"; public static final String VERSION = "3.4.2"; public static String apiKey; public static String apiVersion; public static int readTimeout; }
bump version
src/main/java/com/easypost/EasyPost.java
bump version
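The EasyPost class in this record is pure static configuration, so the version bump is the only change. A minimal usage sketch of the public fields it declares (the key below is a placeholder, not a real credential, and the timeout unit is an assumption):

    // Sketch: configuring the static fields declared in EasyPost above.
    public class EasyPostExample {
        public static void main(String[] args) {
            EasyPost.apiKey = "YOUR_TEST_API_KEY"; // placeholder value
            EasyPost.readTimeout = 30000;          // assumed to be milliseconds
            // requests are then issued against EasyPost.API_BASE
        }
    }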
Java
mit
dc8bf394afd72e2df1c6cf15c76fb47e5006a8a6
0
opennars/opennars,opennars/opennars,opennars/opennars,opennars/opennars
/* * Here comes the text of your license * Each line should be prefixed with * */ package nars.plugin.input; import java.util.ArrayList; import nars.core.EventEmitter; import nars.core.Events; import nars.core.NAR; import nars.core.Parameters; import nars.core.Plugin; import nars.core.control.NAL; import nars.entity.BudgetValue; import nars.entity.Concept; import nars.entity.Sentence; import nars.entity.Stamp; import nars.entity.Task; import nars.entity.TruthValue; import nars.inference.BudgetFunctions; import static nars.inference.TemporalRules.ORDER_CONCURRENT; import static nars.inference.TemporalRules.ORDER_FORWARD; import nars.inference.TruthFunctions; import nars.inference.UtilityFunctions; import nars.io.Symbols; import nars.language.Conjunction; import nars.language.Interval; import nars.language.Term; /** * * @author tc */ public class PerceptionAccel implements Plugin, EventEmitter.EventObserver { @Override public boolean setEnabled(NAR n, boolean enabled) { //register listening to new events: n.memory.event.set(this, enabled, Events.InduceSucceedingEvent.class, Events.ConceptNew.class, Events.ConceptForget.class); return true; } ArrayList<Task> eventbuffer=new ArrayList<>(); int cur_maxlen=1; public void perceive(NAL nal) { //implement Peis idea here now //we start with length 2 compounds, and search for patterns which are one longer than the longest observed one for(int Len=2;Len<=cur_maxlen+1;Len++) { //ok, this is the length we have to collect, measured from the end of event buffer Term[] relterms=new Term[2*Len-1]; //there is a interval term for every event //measuring its distance to the next event, but for the last event this is obsolete //thus it are 2*Len-1] terms Task newEvent=eventbuffer.get(eventbuffer.size()-1); TruthValue truth=newEvent.sentence.truth; Stamp st=new Stamp(nal.memory); int k=0; for(int i=0;i<Len;i++) { int j=eventbuffer.size()-1-(Len-1)+i; //we go till to the end of the event buffer if(j<0) { //event buffer is not filled up enough to support this one, happens at the beginning where event buffer has no elements //but the mechanism already looks for length 2 patterns on the occurence of the first event break; } Task current=eventbuffer.get(j); st.getChain().add(current.sentence.term); relterms[k]=current.sentence.term; if(i!=Len-1) { //if its not the last one, then there is a next one for which we have to put an interval truth=TruthFunctions.deduction(truth, current.sentence.truth); Task next=eventbuffer.get(j+1); relterms[k+1]=Interval.interval(next.sentence.getOccurenceTime()-current.sentence.getOccurenceTime(), nal.memory); } k+=2; } boolean eventBufferDidNotHaveSoMuchEvents=false; for(int i=0;i<relterms.length;i++) { if(relterms[i]==null) { eventBufferDidNotHaveSoMuchEvents=true; } } if(eventBufferDidNotHaveSoMuchEvents) { break; } //decide on the tense of &/ by looking if the first event happens parallel with the last one //Todo refine in 1.6.3 if we want to allow input of difference occurence time boolean after=newEvent.sentence.after(eventbuffer.get(eventbuffer.size()-1-(Len-1)).sentence, nal.memory.param.duration.get()); //critical part: (not checked for correctness yet): //we now have to look at if the first half + the second half already exists as concept, before we add it Term[] firstHalf; Term[] secondHalf; if(relterms[Len-1] instanceof Interval) { //the middle can be a interval, for example in case of a,+1,b , in which case we dont use it firstHalf=new Term[Len-1]; //so we skip the middle here secondHalf=new Term[Len-1]; //as well as here 
int h=0; //make index mapping easier by counting for(int i=0;i<Len-1;i++) { firstHalf[i]=relterms[h]; h++; } h+=1; //we have to overjump the middle element this is why for(int i=0;i<Len-1;i++) { secondHalf[i]=relterms[h]; h++; } } else { //it is a event so its fine firstHalf=new Term[Len]; //2*Len-1 in total secondHalf=new Term[Len]; //but the middle is also used in the second one int h=0; //make index mapping easier by counting for(int i=0;i<Len;i++) { firstHalf[i]=relterms[h]; h++; } h--; //we have to use the middle twice this is why for(int i=0;i<Len;i++) { secondHalf[i]=relterms[h]; h++; } } Term firstC=Conjunction.make(firstHalf, after ? ORDER_FORWARD : ORDER_CONCURRENT); Term secondC=Conjunction.make(secondHalf, after ? ORDER_FORWARD : ORDER_CONCURRENT); Concept C1=nal.memory.concept(firstC); Concept C2=nal.memory.concept(secondC); if(C1==null || C2==null) { if(debugMechanism) { System.out.println("one didn't exist: "+firstC.toString()+" or "+secondC.toString()); } continue; //the components were not observed, so don't allow creating this compound } Conjunction C=(Conjunction) Conjunction.make(relterms, after ? ORDER_FORWARD : ORDER_CONCURRENT); Sentence S=new Sentence(C,Symbols.JUDGMENT_MARK,truth,st); Task T=new Task(S,new BudgetValue(BudgetFunctions.aveAri(C1.getPriority(), C2.getPriority()),Parameters.DEFAULT_JUDGMENT_DURABILITY,truth)); if(debugMechanism) { System.out.println("success: "+T.toString()); } nal.derivedTask(T, false, false, newEvent, S); //lets make the new event the parent task, and derive it } } //keep track of how many conjunctions with related amount of component terms there are: int sz=100; int[] sv=new int[sz]; //use static array, should suffice for now boolean debugMechanism=false; public void handleConjunctionSequence(Term t, boolean Add) { if(!(t instanceof Conjunction)) { return; } Conjunction c=(Conjunction) t; if(debugMechanism) { System.out.println("handleConjunctionSequence with "+t.toString()+" "+String.valueOf(Add)); } if(Add) { //manage concept counter sv[c.term.length]++; } else { sv[c.term.length]--; } //determine cur_maxlen //by finding the first complexity which exists cur_maxlen=1; //minimum size is 1 (the events itself), in which case only chaining of two will happen for(int i=sz-1;i>=2;i--) { //>=2 because a conjunction with size=1 doesnt exist if(sv[i]>0) { cur_maxlen=i; //dont using the index 0 in sv makes it easier here break; } } if(debugMechanism) { System.out.println("determined max len is "+String.valueOf(cur_maxlen)); } } @Override public void event(Class event, Object[] args) { if (event == Events.InduceSucceedingEvent.class) { //todo misleading event name, it is for a new incoming event Task newEvent = (Task)args[0]; eventbuffer.add(newEvent); while(eventbuffer.size()>cur_maxlen+1) { eventbuffer.remove(0); } NAL nal= (NAL)args[1]; perceive(nal); } if(event == Events.ConceptForget.class) { Concept forgot=(Concept) args[0]; handleConjunctionSequence(forgot.term,false); } if(event == Events.ConceptNew.class) { Concept newC=(Concept) args[0]; handleConjunctionSequence(newC.term,true); } } public static int PERCEPTION_DECISION_ACCEL_SAMPLES = 1; //new inference rule accelerating decision making: https://groups.google.com/forum/#!topic/open-nars/B8veE-WDd8Q //mostly only makes sense if perception plugin is loaded }
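The firstHalf/secondHalf index mapping in perceive above is easiest to check on concrete buffers, laid out as event, interval, event, ...:

    // Len = 2, relterms = [a, +1, b]: relterms[Len-1] = relterms[1] is an Interval,
    // so the middle is skipped and the halves do not overlap:
    //   firstHalf  = [a]
    //   secondHalf = [b]
    //
    // Len = 3, relterms = [a, +1, b, +2, c]: relterms[2] = b is an event,
    // so both halves share the middle element:
    //   firstHalf  = [a, +1, b]
    //   secondHalf = [b, +2, c]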
nars_java/nars/plugin/input/PerceptionAccel.java
/* * Here comes the text of your license * Each line should be prefixed with * */ package nars.plugin.input; import java.util.ArrayList; import nars.core.EventEmitter; import nars.core.Events; import nars.core.NAR; import nars.core.Parameters; import nars.core.Plugin; import nars.core.control.NAL; import nars.entity.BudgetValue; import nars.entity.Concept; import nars.entity.Sentence; import nars.entity.Stamp; import nars.entity.Task; import nars.entity.TruthValue; import static nars.inference.TemporalRules.ORDER_CONCURRENT; import static nars.inference.TemporalRules.ORDER_FORWARD; import nars.inference.TruthFunctions; import nars.io.Symbols; import nars.language.Conjunction; import nars.language.Interval; import nars.language.Term; /** * * @author tc */ public class PerceptionAccel implements Plugin, EventEmitter.EventObserver { @Override public boolean setEnabled(NAR n, boolean enabled) { //register listening to new events: n.memory.event.set(this, enabled, Events.InduceSucceedingEvent.class, Events.ConceptNew.class, Events.ConceptForget.class); return true; } ArrayList<Task> eventbuffer=new ArrayList<>(); int cur_maxlen=1; public void perceive(NAL nal) { //implement Peis idea here now //we start with length 2 compounds, and search for patterns which are one longer than the longest observed one for(int Len=2;Len<=cur_maxlen+1;Len++) { //ok, this is the length we have to collect, measured from the end of event buffer Term[] relterms=new Term[2*Len-1]; //there is a interval term for every event //measuring its distance to the next event, but for the last event this is obsolete //thus it are 2*Len-1] terms Task newEvent=eventbuffer.get(eventbuffer.size()-1); TruthValue truth=newEvent.sentence.truth; Stamp st=new Stamp(nal.memory); int k=0; for(int i=0;i<Len;i++) { int j=eventbuffer.size()-1-(Len-1)+i; //we go till to the end of the event buffer if(j<0) { //event buffer is not filled up enough to support this one, happens at the beginning where event buffer has no elements //but the mechanism already looks for length 2 patterns on the occurence of the first event break; } Task current=eventbuffer.get(j); st.getChain().add(current.sentence.term); relterms[k]=current.sentence.term; if(i!=Len-1) { //if its not the last one, then there is a next one for which we have to put an interval truth=TruthFunctions.deduction(truth, current.sentence.truth); Task next=eventbuffer.get(j+1); relterms[k+1]=Interval.interval(next.sentence.getOccurenceTime()-current.sentence.getOccurenceTime(), nal.memory); } k+=2; } boolean eventBufferDidNotHaveSoMuchEvents=false; for(int i=0;i<relterms.length;i++) { if(relterms[i]==null) { eventBufferDidNotHaveSoMuchEvents=true; } } if(eventBufferDidNotHaveSoMuchEvents) { break; } //decide on the tense of &/ by looking if the first event happens parallel with the last one //Todo refine in 1.6.3 if we want to allow input of difference occurence time boolean after=newEvent.sentence.after(eventbuffer.get(eventbuffer.size()-1-(Len-1)).sentence, nal.memory.param.duration.get()); //critical part: (not checked for correctness yet): //we now have to look at if the first half + the second half already exists as concept, before we add it Term[] firstHalf; Term[] secondHalf; if(relterms[Len-1] instanceof Interval) { //the middle can be a interval, for example in case of a,+1,b , in which case we dont use it firstHalf=new Term[Len-1]; //so we skip the middle here secondHalf=new Term[Len-1]; //as well as here int h=0; //make index mapping easier by counting for(int i=0;i<Len-1;i++) { 
firstHalf[i]=relterms[h]; h++; } h+=1; //we have to overjump the middle element this is why for(int i=0;i<Len-1;i++) { secondHalf[i]=relterms[h]; h++; } } else { //it is a event so its fine firstHalf=new Term[Len]; //2*Len-1 in total secondHalf=new Term[Len]; //but the middle is also used in the second one int h=0; //make index mapping easier by counting for(int i=0;i<Len;i++) { firstHalf[i]=relterms[h]; h++; } h--; //we have to use the middle twice this is why for(int i=0;i<Len;i++) { secondHalf[i]=relterms[h]; h++; } } Term firstC=Conjunction.make(firstHalf, after ? ORDER_FORWARD : ORDER_CONCURRENT); Term secondC=Conjunction.make(secondHalf, after ? ORDER_FORWARD : ORDER_CONCURRENT); if(nal.memory.concept(firstC)==null || nal.memory.concept(secondC)==null) { if(debugMechanism) { System.out.println("one didn't exist: "+firstC.toString()+" or "+secondC.toString()); } continue; //the components were not observed, so don't allow creating this compound } Conjunction C=(Conjunction) Conjunction.make(relterms, after ? ORDER_FORWARD : ORDER_CONCURRENT); Sentence S=new Sentence(C,Symbols.JUDGMENT_MARK,truth,st); Task T=new Task(S,new BudgetValue(Parameters.DEFAULT_JUDGMENT_PRIORITY,Parameters.DEFAULT_JUDGMENT_DURABILITY,truth)); if(debugMechanism) { System.out.println("success: "+T.toString()); } nal.derivedTask(T, false, false, newEvent, S); //lets make the new event the parent task, and derive it } } //keep track of how many conjunctions with related amount of component terms there are: int sz=100; int[] sv=new int[sz]; //use static array, should suffice for now boolean debugMechanism=false; public void handleConjunctionSequence(Term t, boolean Add) { if(!(t instanceof Conjunction)) { return; } Conjunction c=(Conjunction) t; if(debugMechanism) { System.out.println("handleConjunctionSequence with "+t.toString()+" "+String.valueOf(Add)); } if(Add) { //manage concept counter sv[c.term.length]++; } else { sv[c.term.length]--; } //determine cur_maxlen //by finding the first complexity which exists cur_maxlen=1; //minimum size is 1 (the events itself), in which case only chaining of two will happen for(int i=sz-1;i>=2;i--) { //>=2 because a conjunction with size=1 doesnt exist if(sv[i]>0) { cur_maxlen=i; //dont using the index 0 in sv makes it easier here break; } } if(debugMechanism) { System.out.println("determined max len is "+String.valueOf(cur_maxlen)); } } @Override public void event(Class event, Object[] args) { if (event == Events.InduceSucceedingEvent.class) { //todo misleading event name, it is for a new incoming event Task newEvent = (Task)args[0]; eventbuffer.add(newEvent); while(eventbuffer.size()>cur_maxlen+1) { eventbuffer.remove(0); } NAL nal= (NAL)args[1]; perceive(nal); } if(event == Events.ConceptForget.class) { Concept forgot=(Concept) args[0]; handleConjunctionSequence(forgot.term,false); } if(event == Events.ConceptNew.class) { Concept newC=(Concept) args[0]; handleConjunctionSequence(newC.term,true); } } public static int PERCEPTION_DECISION_ACCEL_SAMPLES = 1; //new inference rule accelerating decision making: https://groups.google.com/forum/#!topic/open-nars/B8veE-WDd8Q //mostly only makes sense if perception plugin is loaded }
perception: adjust priority according to the part concepts' priorities.
nars_java/nars/plugin/input/PerceptionAccel.java
perception: adjust priority according to the part concepts' priorities.
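The diff in this record replaces the constant Parameters.DEFAULT_JUDGMENT_PRIORITY in the derived task's budget with BudgetFunctions.aveAri(C1.getPriority(), C2.getPriority()), so a composed pattern's priority now follows its component concepts, as the commit message says. A sketch of the intent, assuming aveAri is a plain arithmetic mean:

    // Assumed equivalent of BudgetFunctions.aveAri(p1, p2):
    float priority = (c1.getPriority() + c2.getPriority()) / 2f;
    Task t = new Task(s, new BudgetValue(priority, Parameters.DEFAULT_JUDGMENT_DURABILITY, truth));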
Java
mit
496eadb9cac93731c95a8b5db6cccd11acfceb80
0
breadwallet/breadwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android
package com.platform.middlewares.plugins; import android.app.Activity; import android.content.Context; import android.util.Log; import com.breadwallet.BreadApp; import com.breadwallet.presenter.entities.CryptoRequest; import com.breadwallet.presenter.interfaces.BRAuthCompletion; import com.breadwallet.tools.animation.BRDialog; import com.breadwallet.tools.manager.BREventManager; import com.breadwallet.tools.manager.BRReportsManager; import com.breadwallet.tools.manager.BRSharedPrefs; import com.breadwallet.tools.manager.SendManager; import com.breadwallet.tools.security.AuthManager; import com.breadwallet.tools.threads.executor.BRExecutor; import com.breadwallet.tools.util.BRConstants; import com.breadwallet.tools.util.Utils; import com.breadwallet.wallet.WalletsMaster; import com.breadwallet.wallet.abstracts.BaseWalletManager; import com.breadwallet.wallet.wallets.bitcoin.WalletBitcoinManager; import com.platform.APIClient; import com.platform.BRHTTPHelper; import com.platform.interfaces.Plugin; import com.platform.tools.BRBitId; import org.apache.commons.compress.utils.IOUtils; import org.eclipse.jetty.continuation.Continuation; import org.eclipse.jetty.continuation.ContinuationSupport; import org.eclipse.jetty.server.Request; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.math.BigDecimal; import java.util.Currency; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * BreadWallet * <p/> * Created by Mihail Gutan on <[email protected]> 11/2/16. * Copyright (c) 2016 breadwallet LLC * <p/> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p/> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p/> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ public class WalletPlugin implements Plugin { public static final String TAG = WalletPlugin.class.getName(); private static Continuation continuation; private static Request globalBaseRequest; @Override public boolean handle(String target, final Request baseRequest, HttpServletRequest request, final HttpServletResponse response) { if (!target.startsWith("/_wallet")) return false; Activity app = (Activity) BreadApp.getBreadContext(); if (target.startsWith("/_wallet/info") && request.getMethod().equalsIgnoreCase("get")) { Log.i(TAG, "handling: " + target + " " + baseRequest.getMethod()); if (app == null) { Log.e(TAG, "handle: context is null: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(500, "context is null", baseRequest, response); } WalletsMaster wm = WalletsMaster.getInstance(app); BaseWalletManager w = WalletBitcoinManager.getInstance(app); JSONObject jsonResp = new JSONObject(); try { /**whether or not the users wallet is set up yet, or is currently locked*/ jsonResp.put("no_wallet", wm.noWalletForPlatform(app)); String addrs = BRSharedPrefs.getReceiveAddress(app, wm.getCurrentWallet(app).getIso(app)); if (Utils.isNullOrEmpty(addrs)) { BRReportsManager.reportBug(new NullPointerException("Address is null for simplex!")); Log.e(TAG, "handle: Address is null for simplex!"); addrs = wm.getCurrentWallet(app).getReceiveAddress(app).stringify(); } /**the current receive address*/ jsonResp.put("receive_address", w == null ? "" : w.getReceiveAddress(app).stringify()); /**how digits after the decimal point. 2 = bits 8 = btc 6 = mbtc*/ jsonResp.put("btc_denomiation_digits", w == null ? "" : w.getMaxDecimalPlaces(app)); /**the users native fiat currency as an ISO 4217 code. Should be uppercased */ jsonResp.put("local_currency_code", Currency.getInstance(Locale.getDefault()).getCurrencyCode().toUpperCase()); APIClient.BRResponse resp = new APIClient.BRResponse(jsonResp.toString().getBytes(), 200, "application/json"); return BRHTTPHelper.handleSuccess(resp, baseRequest, response); } catch (JSONException e) { e.printStackTrace(); Log.e(TAG, "handle: json error: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(500, "json error", baseRequest, response); } } else if (target.startsWith("/_wallet/_event") && request.getMethod().equalsIgnoreCase("get")) { Log.i(TAG, "handling: " + target + " " + baseRequest.getMethod()); byte[] rawData = BRHTTPHelper.getBody(request); String name = target.replace("/_event/", ""); Log.e(TAG, "handle: body: " + new String(rawData != null ? rawData : "null".getBytes())); JSONObject json = null; if (rawData != null) { try { json = new JSONObject(new String(rawData)); } catch (JSONException e) { e.printStackTrace(); } } if (json != null) { Map<String, String> attr = new HashMap<>(); while (json.keys().hasNext()) { String key = json.keys().next(); try { attr.put(key, json.getString(key)); } catch (JSONException e) { e.printStackTrace(); Log.e(TAG, String.format("Failed to get the key: %s, from json: %s", key, json.toString())); } } BREventManager.getInstance().pushEvent(name, attr); } else { BREventManager.getInstance().pushEvent(name); } APIClient.BRResponse resp = new APIClient.BRResponse(null, 200); return BRHTTPHelper.handleSuccess(resp, baseRequest, response); } else if (target.startsWith("/_wallet/sign_bitid") && request.getMethod().equalsIgnoreCase("post")) { Log.i(TAG, "handling: " + target + " " + baseRequest.getMethod()); /** * POST /_wallet/sign_bitid Sign a message using the user's BitID private key. 
Calling this WILL trigger authentication Request body: application/json { "prompt_string": "Sign in to My Service", // shown to the user in the authentication prompt "string_to_sign": "https://bitid.org/bitid?x=2783408723", // the string to sign "bitid_url": "https://bitid.org/bitid", // the bitid url for deriving the private key "bitid_index": "0" // the bitid index as a string (just pass "0") } Response body: application/json { "signature": "oibwaeofbawoefb" // base64-encoded signature } */ if (app == null) { Log.e(TAG, "handle: context is null: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(500, "context is null", baseRequest, response); } String contentType = request.getHeader("content-type"); if (contentType == null || !contentType.equalsIgnoreCase("application/json")) { Log.e(TAG, "handle: content type is not application/json: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(400, null, baseRequest, response); } String reqBody = null; try { reqBody = new String(IOUtils.toByteArray(request.getInputStream())); } catch (IOException e) { e.printStackTrace(); } if (Utils.isNullOrEmpty(reqBody)) { Log.e(TAG, "handle: reqBody is empty: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(400, null, baseRequest, response); } try { JSONObject obj = new JSONObject(reqBody); continuation = ContinuationSupport.getContinuation(request); continuation.suspend(response); globalBaseRequest = baseRequest; BRBitId.signBitID(app, null, obj); } catch (JSONException e) { e.printStackTrace(); Log.e(TAG, "handle: Failed to parse Json request body: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(400, "failed to parse json", baseRequest, response); } return true; } else if (target.startsWith("/_wallet/authenticate") && request.getMethod().equalsIgnoreCase("post")) { try { /** POST /_wallet/authenticate Verify that the current user is the wallet's owner. Post a request of {prompt: "Promt Text!", id: "<uuidv4>" }. 
Get back an { "authenticated": true } */ String reqBody = null; try { reqBody = new String(IOUtils.toByteArray(request.getInputStream())); } catch (IOException e) { e.printStackTrace(); } if (Utils.isNullOrEmpty(reqBody)) { Log.e(TAG, "handle: reqBody is empty: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(400, null, baseRequest, response); } JSONObject obj = new JSONObject(reqBody); String authText = obj.getString("prompt"); continuation = ContinuationSupport.getContinuation(request); continuation.suspend(response); globalBaseRequest = baseRequest; AuthManager.getInstance().authPrompt(app, authText, "", false, false, new BRAuthCompletion() { @Override public void onComplete() { BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() { @Override public void run() { JSONObject obj = new JSONObject(); try { obj.put("authenticated", true); } catch (JSONException e) { e.printStackTrace(); } if (continuation != null) { APIClient.BRResponse resp = new APIClient.BRResponse(obj.toString().getBytes(), 200, "application/json"); BRHTTPHelper.handleSuccess(resp, globalBaseRequest, (HttpServletResponse) continuation.getServletResponse()); } cleanUp(); } }); } @Override public void onCancel() { BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() { @Override public void run() { JSONObject obj = new JSONObject(); try { obj.put("authenticated", false); } catch (JSONException e) { e.printStackTrace(); } APIClient.BRResponse resp = new APIClient.BRResponse(obj.toString().getBytes(), 200, "application/json"); BRHTTPHelper.handleSuccess(resp, globalBaseRequest, (HttpServletResponse) continuation.getServletResponse()); cleanUp(); } }); } }); } catch (JSONException e) { e.printStackTrace(); Log.e(TAG, "handle: Failed to parse Json request body: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(400, "failed to parse json", baseRequest, response); } } else if (target.startsWith("/_wallet/currencies")) { JSONArray arr = getCurrencyData(app); if (arr.length() == 0) { BRReportsManager.reportBug(new IllegalArgumentException("_wallet/currencies created an empty json")); return BRHTTPHelper.handleError(500, "Failed to create json", baseRequest, response); } APIClient.BRResponse resp = new APIClient.BRResponse(arr.toString().getBytes(), 200, "application/json"); return BRHTTPHelper.handleSuccess(resp, baseRequest, response); } else if (target.startsWith("/_wallet/transaction")) { String reqBody = null; try { reqBody = new String(IOUtils.toByteArray(request.getInputStream())); } catch (IOException e) { e.printStackTrace(); } if (Utils.isNullOrEmpty(reqBody)) { Log.e(TAG, "handle: reqBody is empty: " + target + " " + baseRequest.getMethod()); return BRHTTPHelper.handleError(400, null, baseRequest, response); } try { JSONObject obj = new JSONObject(reqBody); sendTx(app, obj); continuation = ContinuationSupport.getContinuation(request); continuation.suspend(response); globalBaseRequest = baseRequest; return true; } catch (JSONException e) { e.printStackTrace(); } return BRHTTPHelper.handleError(500, "Invalid json request", baseRequest, response); } else if (target.startsWith("/_wallet/addresses")) { String iso = target.substring(target.lastIndexOf("/") + 1); BaseWalletManager w = WalletsMaster.getInstance(app).getWalletByIso(app, iso); if (w == null) { return BRHTTPHelper.handleError(500, "Invalid iso for address: " + iso, baseRequest, response); } JSONObject obj = new JSONObject(); try { obj.put("currency", 
w.getIso(app)); obj.put("address", w.getReceiveAddress(app).stringify()); } catch (JSONException e) { e.printStackTrace(); } APIClient.BRResponse resp = new APIClient.BRResponse(obj.toString().getBytes(), 200, "application/json"); return BRHTTPHelper.handleSuccess(resp, baseRequest, response); } Log.e(TAG, "handle: WALLET PLUGIN DID NOT HANDLE: " + target + " " + baseRequest.getMethod()); return true; } private void sendTx(final Context app, JSONObject obj) { String toAddress = null; String toDescription = null; String currency = null; String numerator = null; String denominator = null; String txCurrency = null; try { toAddress = obj.getString("toAddress"); toDescription = obj.getString("toDescription"); currency = obj.getString("currency"); JSONObject amount = obj.getJSONObject("amount"); numerator = amount.getString("numerator"); denominator = amount.getString("denominator"); txCurrency = amount.getString("currency"); } catch (JSONException e) { e.printStackTrace(); } Log.e(TAG, "sendTx: " + String.format("address (%s), description (%s), currency (%s), numerator (%s), denominator(%s), txCurrency(%s)", toAddress, toDescription, currency, numerator, denominator, txCurrency)); if (Utils.isNullOrEmpty(toAddress) || Utils.isNullOrEmpty(toDescription) || Utils.isNullOrEmpty(currency) || Utils.isNullOrEmpty(numerator) || Utils.isNullOrEmpty(denominator) || Utils.isNullOrEmpty(txCurrency)) { return; } final BaseWalletManager wm = WalletsMaster.getInstance(app).getWalletByIso(app, currency); String addr = wm.undecorateAddress(app, toAddress); if (Utils.isNullOrEmpty(addr)) { BRDialog.showSimpleDialog(app, "Failed to create tx for exchange!", "Invalid address: " + addr); return; } BigDecimal bigAmount = WalletsMaster.getInstance(app).isIsoErc20(app, currency) ? new BigDecimal(numerator).divide(new BigDecimal(denominator), nrOfZeros(denominator), BRConstants.ROUNDING_MODE) : new BigDecimal(numerator); final CryptoRequest item = new CryptoRequest(null, false, null, addr, bigAmount); BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() { @Override public void run() { SendManager.sendTransaction(app, item, wm, new SendManager.SendCompletion() { @Override public void onCompleted(String hash, boolean succeed) { finalizeTx(succeed, hash); } }); } }); } private static int nrOfZeros(String n) { int count = 0; while (n.charAt(n.length() - 1) == '0') { n = new BigDecimal(n).divide(new BigDecimal(10)).toPlainString(); count++; } return count; } private JSONArray getCurrencyData(Context app) { JSONArray arr = new JSONArray(); List<BaseWalletManager> list = WalletsMaster.getInstance(app).getAllWallets(app); for (BaseWalletManager w : list) { JSONObject obj = new JSONObject(); try { obj.put("id", w.getIso(app)); obj.put("ticker", w.getIso(app)); obj.put("name", w.getName(app)); //Colors JSONArray colors = new JSONArray(); colors.put(w.getUiConfiguration().mStartColor); colors.put(w.getUiConfiguration().mEndColor); obj.put("colors", colors); //Balance //TODO Temporary solution due to the fact that the erc20 balances are stored in Decimals rather than smallest currency (WEI, SATOSHIS) JSONObject balance = new JSONObject(); boolean isErc20 = WalletsMaster.getInstance(app).isIsoErc20(app, w.getIso(app)); BigDecimal rawBalance = w.getCachedBalance(app); String denominator = w.getDenominator(app); balance.put("currency", w.getIso(app)); balance.put("numerator", isErc20 ? 
rawBalance.multiply(new BigDecimal(denominator)).toPlainString() : rawBalance.toPlainString());
                balance.put("denominator", denominator);

                //Fiat balance
                JSONObject fiatBalance = new JSONObject();
                fiatBalance.put("currency", BRSharedPrefs.getPreferredFiatIso(app));
                fiatBalance.put("numerator", w.getFiatBalance(app).multiply(new BigDecimal(100)).toPlainString());
                fiatBalance.put("denominator", String.valueOf(100));

                //Exchange
                JSONObject exchange = new JSONObject();
                exchange.put("currency", BRSharedPrefs.getPreferredFiatIso(app));
                exchange.put("numerator", w.getFiatExchangeRate(app).multiply(new BigDecimal(100)).toPlainString());
                exchange.put("denominator", String.valueOf(100));

                obj.put("balance", balance);
                obj.put("fiatBalance", fiatBalance);
                obj.put("exchange", exchange);
                arr.put(obj);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        return arr;
    }

    public static void finalizeTx(final boolean succeed, final String hash) {
        BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    // Check the continuation first: every branch below dereferences it,
                    // so a late or duplicate callback has to bail out here.
                    if (continuation == null) {
                        Log.e(TAG, "finalizeTx: WARNING continuation is null");
                        return;
                    }
                    if (!succeed || Utils.isNullOrEmpty(hash)) {
                        try {
                            ((HttpServletResponse) continuation.getServletResponse()).sendError(500);
                        } catch (IOException e) {
                            Log.e(TAG, "finalizeTx: failed to send error 500: ", e);
                        }
                        return;
                    }
                    JSONObject result = new JSONObject();
                    try {
                        result.put("hash", hash);
                        result.put("transmitted", true);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                    try {
                        continuation.getServletResponse().setContentType("application/json");
                        continuation.getServletResponse().setCharacterEncoding("UTF-8");
                        continuation.getServletResponse().getWriter().print(result.toString());
                        Log.d(TAG, "finalizeTx: finished with writing to the response: " + result);
                    } catch (Exception e) {
                        Log.e(TAG, "finalizeTx: Failed to send json: ", e);
                    }
                    ((HttpServletResponse) continuation.getServletResponse()).setStatus(200);
                } finally {
                    cleanUp();
                }
            }
        });
    }

    public static void sendBitIdResponse(final JSONObject restJson, final boolean authenticated) {
        BRExecutor.getInstance().forBackgroundTasks().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    // Same ordering as finalizeTx: verify the continuation exists before
                    // any branch that dereferences it.
                    if (continuation == null) {
                        Log.e(TAG, "sendBitIdResponse: WARNING continuation is null");
                        return;
                    }
                    if (!authenticated) {
                        try {
                            ((HttpServletResponse) continuation.getServletResponse()).sendError(401);
                        } catch (IOException e) {
                            Log.e(TAG, "sendBitIdResponse: failed to send error 401: ", e);
                        }
                        return;
                    }
                    if (restJson == null || restJson.isNull("signature")) {
                        Log.e(TAG, "sendBitIdResponse: WARNING restJson is malformed or null: " + restJson);
                        try {
                            ((HttpServletResponse) continuation.getServletResponse()).sendError(500, "json malformed or null");
                        } catch (IOException e) {
                            Log.e(TAG, "sendBitIdResponse: failed to send error 500: ", e);
                        }
                        return;
                    }
                    try {
                        continuation.getServletResponse().setContentType("application/json");
                        continuation.getServletResponse().setCharacterEncoding("UTF-8");
                        continuation.getServletResponse().getWriter().print(restJson);
                        Log.d(TAG, "sendBitIdResponse: finished with writing to the response: " + restJson);
                    } catch (Exception e) {
                        Log.e(TAG, "sendBitIdResponse: Failed to send json: ", e);
                    }
                    ((HttpServletResponse) continuation.getServletResponse()).setStatus(200);
                } finally {
                    cleanUp();
                }
            }
        });
    }

    private static void cleanUp() {
        if (globalBaseRequest != null) globalBaseRequest.setHandled(true);
        if (continuation != null) continuation.complete();
        continuation = null;
        globalBaseRequest = null;
    }
}
app/src/main/java/com/platform/middlewares/plugins/WalletPlugin.java
package com.platform.middlewares.plugins; import android.app.Activity; import android.content.Context; import android.util.Log; import com.breadwallet.BreadApp; import com.breadwallet.presenter.entities.CryptoRequest; import com.breadwallet.presenter.interfaces.BRAuthCompletion; import com.breadwallet.tools.animation.BRDialog; import com.breadwallet.tools.manager.BREventManager; import com.breadwallet.tools.manager.BRReportsManager; import com.breadwallet.tools.manager.BRSharedPrefs; import com.breadwallet.tools.manager.SendManager; import com.breadwallet.tools.security.AuthManager; import com.breadwallet.tools.threads.executor.BRExecutor; import com.breadwallet.tools.util.BRConstants; import com.breadwallet.tools.util.Utils; import com.breadwallet.wallet.WalletsMaster; import com.breadwallet.wallet.abstracts.BaseWalletManager; import com.breadwallet.wallet.wallets.bitcoin.WalletBitcoinManager; import com.platform.APIClient; import com.platform.BRHTTPHelper; import com.platform.interfaces.Plugin; import com.platform.tools.BRBitId; import org.apache.commons.compress.utils.IOUtils; import org.eclipse.jetty.continuation.Continuation; import org.eclipse.jetty.continuation.ContinuationSupport; import org.eclipse.jetty.server.Request; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.math.BigDecimal; import java.util.Currency; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * BreadWallet * <p/> * Created by Mihail Gutan on <[email protected]> 11/2/16. * Copyright (c) 2016 breadwallet LLC * <p/> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p/> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p/> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
public class WalletPlugin implements Plugin {
    public static final String TAG = WalletPlugin.class.getName();

    private static Continuation continuation;
    private static Request globalBaseRequest;

    @Override
    public boolean handle(String target, final Request baseRequest, HttpServletRequest request,
                          final HttpServletResponse response) {
        if (!target.startsWith("/_wallet")) return false;
        Activity app = (Activity) BreadApp.getBreadContext();

        if (target.startsWith("/_wallet/info") && request.getMethod().equalsIgnoreCase("get")) {
            Log.i(TAG, "handling: " + target + " " + baseRequest.getMethod());
            if (app == null) {
                Log.e(TAG, "handle: context is null: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(500, "context is null", baseRequest, response);
            }
            WalletsMaster wm = WalletsMaster.getInstance(app);
            BaseWalletManager w = WalletBitcoinManager.getInstance(app);
            JSONObject jsonResp = new JSONObject();
            try {
                /** whether or not the user's wallet is set up yet, or is currently locked */
                jsonResp.put("no_wallet", wm.noWalletForPlatform(app));

                String addrs = BRSharedPrefs.getReceiveAddress(app, wm.getCurrentWallet(app).getIso(app));
                if (Utils.isNullOrEmpty(addrs)) {
                    BRReportsManager.reportBug(new NullPointerException("Address is null for simplex!"));
                    Log.e(TAG, "handle: Address is null for simplex!");
                    addrs = wm.getCurrentWallet(app).getReceiveAddress(app).stringify();
                }

                /** the current receive address */
                jsonResp.put("receive_address", w == null ? "" : w.getReceiveAddress(app).stringify());

                /** how many digits after the decimal point: 2 = bits, 6 = mbtc, 8 = btc */
                jsonResp.put("btc_denomiation_digits", w == null ? "" : w.getMaxDecimalPlaces(app));

                /** the user's native fiat currency as an ISO 4217 code. Should be uppercased */
                jsonResp.put("local_currency_code",
                        Currency.getInstance(Locale.getDefault()).getCurrencyCode().toUpperCase());
                APIClient.BRResponse resp = new APIClient.BRResponse(jsonResp.toString().getBytes(), 200,
                        "application/json");
                return BRHTTPHelper.handleSuccess(resp, baseRequest, response);
            } catch (JSONException e) {
                e.printStackTrace();
                Log.e(TAG, "handle: json error: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(500, "json error", baseRequest, response);
            }
        } else if (target.startsWith("/_wallet/_event") && request.getMethod().equalsIgnoreCase("get")) {
            Log.i(TAG, "handling: " + target + " " + baseRequest.getMethod());
            byte[] rawData = BRHTTPHelper.getBody(request);
            // Strip the full route prefix; replacing only "/_event/" would leave
            // "/_wallet" glued to the front of the event name.
            String name = target.replace("/_wallet/_event/", "");
            Log.e(TAG, "handle: body: " + new String(rawData != null ? rawData : "null".getBytes()));
            JSONObject json = null;
            if (rawData != null) {
                try {
                    json = new JSONObject(new String(rawData));
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
            if (json != null) {
                Map<String, String> attr = new HashMap<>();
                // Iterate with a single iterator; calling json.keys() in the loop
                // condition returns a fresh iterator each time and never terminates.
                java.util.Iterator<String> keys = json.keys();
                while (keys.hasNext()) {
                    String key = keys.next();
                    try {
                        attr.put(key, json.getString(key));
                    } catch (JSONException e) {
                        e.printStackTrace();
                        Log.e(TAG, String.format("Failed to get the key: %s, from json: %s", key, json.toString()));
                    }
                }
                BREventManager.getInstance().pushEvent(name, attr);
            } else {
                BREventManager.getInstance().pushEvent(name);
            }
            APIClient.BRResponse resp = new APIClient.BRResponse(null, 200);
            return BRHTTPHelper.handleSuccess(resp, baseRequest, response);
        } else if (target.startsWith("/_wallet/sign_bitid") && request.getMethod().equalsIgnoreCase("post")) {
            Log.i(TAG, "handling: " + target + " " + baseRequest.getMethod());
            /**
             * POST /_wallet/sign_bitid
             *
             * Sign a message using the user's BitID private key. Calling this WILL trigger authentication.
             *
             * Request body: application/json
             * {
             *     "prompt_string": "Sign in to My Service",                 // shown to the user in the authentication prompt
             *     "string_to_sign": "https://bitid.org/bitid?x=2783408723", // the string to sign
             *     "bitid_url": "https://bitid.org/bitid",                   // the bitid url for deriving the private key
             *     "bitid_index": "0"                                        // the bitid index as a string (just pass "0")
             * }
             *
             * Response body: application/json
             * {
             *     "signature": "oibwaeofbawoefb" // base64-encoded signature
             * }
             */
            if (app == null) {
                Log.e(TAG, "handle: context is null: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(500, "context is null", baseRequest, response);
            }
            String contentType = request.getHeader("content-type");
            if (contentType == null || !contentType.equalsIgnoreCase("application/json")) {
                Log.e(TAG, "handle: content type is not application/json: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(400, null, baseRequest, response);
            }
            String reqBody = null;
            try {
                reqBody = new String(IOUtils.toByteArray(request.getInputStream()));
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (Utils.isNullOrEmpty(reqBody)) {
                Log.e(TAG, "handle: reqBody is empty: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(400, null, baseRequest, response);
            }
            try {
                JSONObject obj = new JSONObject(reqBody);
                continuation = ContinuationSupport.getContinuation(request);
                continuation.suspend(response);
                globalBaseRequest = baseRequest;
                BRBitId.signBitID(app, null, obj);
            } catch (JSONException e) {
                e.printStackTrace();
                Log.e(TAG, "handle: Failed to parse Json request body: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(400, "failed to parse json", baseRequest, response);
            }
            return true;
        } else if (target.startsWith("/_wallet/authenticate") && request.getMethod().equalsIgnoreCase("post")) {
            try {
                /**
                 * POST /_wallet/authenticate
                 *
                 * Verify that the current user is the wallet's owner.
                 * Post a request of {prompt: "Prompt Text!", id: "<uuidv4>"}.
                 * Get back { "authenticated": true }
                 */
                String reqBody = null;
                try {
                    reqBody = new String(IOUtils.toByteArray(request.getInputStream()));
                } catch (IOException e) {
                    e.printStackTrace();
                }
                if (Utils.isNullOrEmpty(reqBody)) {
                    Log.e(TAG, "handle: reqBody is empty: " + target + " " + baseRequest.getMethod());
                    return BRHTTPHelper.handleError(400, null, baseRequest, response);
                }
                JSONObject obj = new JSONObject(reqBody);
                String authText = obj.getString("prompt");
                continuation = ContinuationSupport.getContinuation(request);
                continuation.suspend(response);
                globalBaseRequest = baseRequest;
                AuthManager.getInstance().authPrompt(app, authText, "", false, false, new BRAuthCompletion() {
                    @Override
                    public void onComplete() {
                        BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() {
                            @Override
                            public void run() {
                                JSONObject obj = new JSONObject();
                                try {
                                    obj.put("authenticated", true);
                                } catch (JSONException e) {
                                    e.printStackTrace();
                                }
                                if (continuation != null) {
                                    APIClient.BRResponse resp = new APIClient.BRResponse(obj.toString().getBytes(), 200,
                                            "application/json");
                                    BRHTTPHelper.handleSuccess(resp, globalBaseRequest,
                                            (HttpServletResponse) continuation.getServletResponse());
                                }
                                cleanUp();
                            }
                        });
                    }

                    @Override
                    public void onCancel() {
                        BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() {
                            @Override
                            public void run() {
                                JSONObject obj = new JSONObject();
                                try {
                                    obj.put("authenticated", false);
                                } catch (JSONException e) {
                                    e.printStackTrace();
                                }
                                APIClient.BRResponse resp = new APIClient.BRResponse(obj.toString().getBytes(), 200,
                                        "application/json");
                                BRHTTPHelper.handleSuccess(resp, globalBaseRequest,
                                        (HttpServletResponse) continuation.getServletResponse());
                                cleanUp();
                            }
                        });
                    }
                });
                // The response is delivered asynchronously through the continuation;
                // returning here keeps the branch from falling through to the
                // "DID NOT HANDLE" log below.
                return true;
            } catch (JSONException e) {
                e.printStackTrace();
                Log.e(TAG, "handle: Failed to parse Json request body: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(400, "failed to parse json", baseRequest, response);
            }
        } else if (target.startsWith("/_wallet/currencies")) {
            JSONArray arr = getCurrencyData(app);
            if (arr.length() == 0) {
                BRReportsManager.reportBug(new IllegalArgumentException("_wallet/currencies created an empty json"));
                return BRHTTPHelper.handleError(500, "Failed to create json", baseRequest, response);
            }
            APIClient.BRResponse resp = new APIClient.BRResponse(arr.toString().getBytes(), 200, "application/json");
            return BRHTTPHelper.handleSuccess(resp, baseRequest, response);
        } else if (target.startsWith("/_wallet/transaction")) {
            String reqBody = null;
            try {
                reqBody = new String(IOUtils.toByteArray(request.getInputStream()));
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (Utils.isNullOrEmpty(reqBody)) {
                Log.e(TAG, "handle: reqBody is empty: " + target + " " + baseRequest.getMethod());
                return BRHTTPHelper.handleError(400, null, baseRequest, response);
            }
            try {
                JSONObject obj = new JSONObject(reqBody);
                // Suspend the continuation before kicking off the send, so the
                // completion callback cannot race a not-yet-suspended request.
                continuation = ContinuationSupport.getContinuation(request);
                continuation.suspend(response);
                globalBaseRequest = baseRequest;
                sendTx(app, obj);
                return true;
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return BRHTTPHelper.handleError(500, "Invalid json request", baseRequest, response);
        } else if (target.startsWith("/_wallet/addresses")) {
            String iso = target.substring(target.lastIndexOf("/") + 1);
            BaseWalletManager w = WalletsMaster.getInstance(app).getWalletByIso(app, iso);
            if (w == null) {
                return BRHTTPHelper.handleError(500, "Invalid iso for address: " + iso, baseRequest, response);
            }
            JSONObject obj = new JSONObject();
            try {
                obj.put("currency",
w.getIso(app)); obj.put("address", w.getReceiveAddress(app).stringify()); } catch (JSONException e) { e.printStackTrace(); } APIClient.BRResponse resp = new APIClient.BRResponse(obj.toString().getBytes(), 200, "application/json"); return BRHTTPHelper.handleSuccess(resp, baseRequest, response); } Log.e(TAG, "handle: WALLET PLUGIN DID NOT HANDLE: " + target + " " + baseRequest.getMethod()); return true; } private void sendTx(final Context app, JSONObject obj) { String toAddress = null; String toDescription = null; String currency = null; String numerator = null; String denominator = null; String txCurrency = null; try { toAddress = obj.getString("toAddress"); toDescription = obj.getString("toDescription"); currency = obj.getString("currency"); JSONObject amount = obj.getJSONObject("amount"); numerator = amount.getString("numerator"); denominator = amount.getString("denominator"); txCurrency = amount.getString("currency"); } catch (JSONException e) { e.printStackTrace(); } Log.e(TAG, "sendTx: " + String.format("address (%s), description (%s), currency (%s), numerator (%s), denominator(%s), txCurrency(%s)", toAddress, toDescription, currency, numerator, denominator, txCurrency)); if (Utils.isNullOrEmpty(toAddress) || Utils.isNullOrEmpty(toDescription) || Utils.isNullOrEmpty(currency) || Utils.isNullOrEmpty(numerator) || Utils.isNullOrEmpty(denominator) || Utils.isNullOrEmpty(txCurrency)) { return; } final BaseWalletManager wm = WalletsMaster.getInstance(app).getWalletByIso(app, currency); String addr = wm.undecorateAddress(app, toAddress); if (Utils.isNullOrEmpty(addr)) { BRDialog.showSimpleDialog(app, "Failed to create tx for exchange!", "Invalid address: " + addr); return; } BigDecimal bigAmount = WalletsMaster.getInstance(app).isIsoErc20(app, currency) ? new BigDecimal(numerator).divide(new BigDecimal(denominator), nrOfZeros(denominator), BRConstants.ROUNDING_MODE) : new BigDecimal(numerator); final CryptoRequest item = new CryptoRequest(null, false, null, addr, bigAmount); BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() { @Override public void run() { SendManager.sendTransaction(app, item, wm, new SendManager.SendCompletion() { @Override public void onCompleted(String hash, boolean succeed) { finalizeTx(succeed, hash); } }); } }); } private static int nrOfZeros(String n) { int count = 0; while (n.charAt(n.length() - 1) == '0') { n = new BigDecimal(n).divide(new BigDecimal(10)).toPlainString(); count++; } return count; } private JSONArray getCurrencyData(Context app) { JSONArray arr = new JSONArray(); List<BaseWalletManager> list = WalletsMaster.getInstance(app).getAllWallets(app); for (BaseWalletManager w : list) { JSONObject obj = new JSONObject(); try { obj.put("id", w.getIso(app)); obj.put("ticker", w.getIso(app)); obj.put("name", w.getName(app)); //Colors JSONArray colors = new JSONArray(); colors.put(w.getUiConfiguration().mStartColor); colors.put(w.getUiConfiguration().mEndColor); obj.put("colors", colors); //Balance JSONObject balance = new JSONObject(); String denominator = w.getDenominator(app); balance.put("currency", w.getIso(app)); // TODO: cached balance is already multiplied by the denominator. Figure out why and fix. 
// balance.put("numerator", w.getCachedBalance(app).multiply(new BigDecimal(denominator)).toPlainString());
                balance.put("numerator", w.getCachedBalance(app).toPlainString());
                balance.put("denominator", denominator);

                //Fiat balance
                JSONObject fiatBalance = new JSONObject();
                fiatBalance.put("currency", BRSharedPrefs.getPreferredFiatIso(app));
                fiatBalance.put("numerator", w.getFiatBalance(app).multiply(new BigDecimal(100)).toPlainString());
                fiatBalance.put("denominator", String.valueOf(100));

                //Exchange
                JSONObject exchange = new JSONObject();
                exchange.put("currency", BRSharedPrefs.getPreferredFiatIso(app));
                exchange.put("numerator", w.getFiatExchangeRate(app).multiply(new BigDecimal(100)).toPlainString());
                exchange.put("denominator", String.valueOf(100));

                obj.put("balance", balance);
                obj.put("fiatBalance", fiatBalance);
                obj.put("exchange", exchange);
                arr.put(obj);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        return arr;
    }

    public static void finalizeTx(final boolean succeed, final String hash) {
        BRExecutor.getInstance().forLightWeightBackgroundTasks().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    // Check the continuation first: every branch below dereferences it,
                    // so a late or duplicate callback has to bail out here.
                    if (continuation == null) {
                        Log.e(TAG, "finalizeTx: WARNING continuation is null");
                        return;
                    }
                    if (!succeed || Utils.isNullOrEmpty(hash)) {
                        try {
                            ((HttpServletResponse) continuation.getServletResponse()).sendError(500);
                        } catch (IOException e) {
                            Log.e(TAG, "finalizeTx: failed to send error 500: ", e);
                        }
                        return;
                    }
                    JSONObject result = new JSONObject();
                    try {
                        result.put("hash", hash);
                        result.put("transmitted", true);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                    try {
                        continuation.getServletResponse().setContentType("application/json");
                        continuation.getServletResponse().setCharacterEncoding("UTF-8");
                        continuation.getServletResponse().getWriter().print(result.toString());
                        Log.d(TAG, "finalizeTx: finished with writing to the response: " + result);
                    } catch (Exception e) {
                        Log.e(TAG, "finalizeTx: Failed to send json: ", e);
                    }
                    ((HttpServletResponse) continuation.getServletResponse()).setStatus(200);
                } finally {
                    cleanUp();
                }
            }
        });
    }

    public static void sendBitIdResponse(final JSONObject restJson, final boolean authenticated) {
        BRExecutor.getInstance().forBackgroundTasks().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    // Same ordering as finalizeTx: verify the continuation exists before
                    // any branch that dereferences it.
                    if (continuation == null) {
                        Log.e(TAG, "sendBitIdResponse: WARNING continuation is null");
                        return;
                    }
                    if (!authenticated) {
                        try {
                            ((HttpServletResponse) continuation.getServletResponse()).sendError(401);
                        } catch (IOException e) {
                            Log.e(TAG, "sendBitIdResponse: failed to send error 401: ", e);
                        }
                        return;
                    }
                    if (restJson == null || restJson.isNull("signature")) {
                        Log.e(TAG, "sendBitIdResponse: WARNING restJson is malformed or null: " + restJson);
                        try {
                            ((HttpServletResponse) continuation.getServletResponse()).sendError(500, "json malformed or null");
                        } catch (IOException e) {
                            Log.e(TAG, "sendBitIdResponse: failed to send error 500: ", e);
                        }
                        return;
                    }
                    try {
                        continuation.getServletResponse().setContentType("application/json");
                        continuation.getServletResponse().setCharacterEncoding("UTF-8");
                        continuation.getServletResponse().getWriter().print(restJson);
                        Log.d(TAG, "sendBitIdResponse: finished with writing to the response: " + restJson);
                    } catch (Exception e) {
                        Log.e(TAG, "sendBitIdResponse: Failed to send json: ", e);
                    }
                    ((HttpServletResponse) continuation.getServletResponse()).setStatus(200);
                } finally {
                    cleanUp();
                }
            }
        });
    }

    private static void cleanUp() {
        if (globalBaseRequest != null) globalBaseRequest.setHandled(true);
        if (continuation != null) continuation.complete();
        continuation = null;
        globalBaseRequest = null;
    }
}
temporary fix for non integer numerators
app/src/main/java/com/platform/middlewares/plugins/WalletPlugin.java
temporary fix for non integer numerators
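The /_wallet/currencies payload above encodes every amount as a {currency, numerator, denominator} triple (fiat values use a fixed denominator of 100). As a minimal standalone sketch of how a consumer might turn such a pair back into a decimal amount: the class and method names here are hypothetical, not part of the plugin, and RoundingMode.HALF_EVEN stands in for the app's BRConstants.ROUNDING_MODE.

import java.math.BigDecimal;
import java.math.RoundingMode;

public class AmountDemo {

    /** Turn a {numerator, denominator} pair into a decimal amount. */
    static BigDecimal toDecimal(String numerator, String denominator) {
        int scale = trailingZeros(denominator); // "100" -> 2 decimal places
        return new BigDecimal(numerator)
                .divide(new BigDecimal(denominator), scale, RoundingMode.HALF_EVEN);
    }

    /** Count trailing zeros of a power-of-ten denominator without repeated division. */
    static int trailingZeros(String n) {
        int count = 0;
        for (int i = n.length() - 1; i >= 0 && n.charAt(i) == '0'; i--) {
            count++;
        }
        return count;
    }

    public static void main(String[] args) {
        // {"numerator": "12345", "denominator": "100"} -> 123.45
        System.out.println(toDecimal("12345", "100"));
    }
}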
Java
mit
44fc22d598604ddc4d4b1621c85de88cfd80e1af
0
Vectis99/sshs-2016-willamette
package inputHandler;

/**
 * Breaks apart a String into usable subsections.<br>
 * Just static helper functions here.
 * @author South Salem High School
 */
public class BreakString {

    /*
     * TODO:
     * -Separate by commas or other separators
     */

    /**
     * Removes every occurrence of each junk token (whitespace or
     * user-defined) from the given string.
     */
    public static String trim(String clogged, String[] junk) {
        for (String token : junk) {
            clogged = clogged.replace(token, "");
        }
        return clogged;
    }
}
src/inputHandler/BreakString.java
package inputHandler; /** * Breaks apart a String into usable subsections.<br> * Just static helper functions here. * @author South Salem High School * */ public class BreakString { /* * TODO: * -Remove junk (Whitespace or user defined) (Regex?) * -Separate by commas or other separators */ }
Testing a commit because Aaron's thing isn't working.
src/inputHandler/BreakString.java
Testing a commit because Aaron's thing isn't working.
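Assuming the completed trim above, a quick usage sketch (the demo class is hypothetical) showing junk removal ahead of the comma-splitting step the TODO still calls for:

import inputHandler.BreakString;

public class BreakStringDemo {
    public static void main(String[] args) {
        // Strip spaces and tabs before splitting on commas.
        String cleaned = BreakString.trim("foo, bar ,\tbaz", new String[]{" ", "\t"});
        System.out.println(cleaned);          // foo,bar,baz
        String[] parts = cleaned.split(",");  // the separator step still on the TODO list
        System.out.println(parts.length);     // 3
    }
}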
Java
mit
5ad52813f28006be5f33f7ffd6abb5ca832f2998
0
CS2103JAN2017-W15-B2/main,CS2103JAN2017-W15-B2/main
//@@author A0162266E package guitests; import java.io.File; import java.io.IOException; import org.junit.Test; import werkbook.task.logic.commands.SaveCommand; public class SaveCommandTest extends TaskListGuiTest { @Test public void save_invalidPath_failure() { commandBox.runCommand("save \"\0\""); assertResultMessage(SaveCommand.MESSAGE_INVALID_PATH); } @Test public void save_nonExistentFolder_failure() { commandBox.runCommand("save src\\test\\data\\sandbox\\some_folder"); assertResultMessage(SaveCommand.MESSAGE_FOLDER_NOT_EXIST); } @Test public void save_notDirectory_failure() { File newFile = new File("src\\test\\data\\sandbox\\newFile"); try { newFile.createNewFile(); } catch (IOException e) { e.printStackTrace(); } commandBox.runCommand("save src\\test\\data\\sandbox\\newFile"); assertResultMessage(SaveCommand.MESSAGE_NOT_A_DIRECTORY); } @Test public void save_validDirectory_success() { File newFolder = new File("src\\test\\data\\sandbox\\newFolder"); newFolder.mkdir(); commandBox.runCommand("save src\\test\\data\\sandbox\\newFolder"); assertResultMessage(SaveCommand.MESSAGE_SUCCESS); } }
src/test/java/guitests/SaveCommandTest.java
//@@author A0162266E package guitests; import java.io.File; import java.io.IOException; import org.junit.Test; import werkbook.task.logic.commands.SaveCommand; public class SaveCommandTest extends TaskListGuiTest { @Test public void save_invalidPath_failure() { commandBox.runCommand("save \"\" "); assertResultMessage(SaveCommand.MESSAGE_INVALID_PATH); } @Test public void save_nonExistentFolder_failure() { commandBox.runCommand("save src\\test\\data\\sandbox\\some_folder"); assertResultMessage(SaveCommand.MESSAGE_FOLDER_NOT_EXIST); } @Test public void save_notDirectory_failure() { File newFile = new File("src\\test\\data\\sandbox\\newFile"); try { newFile.createNewFile(); } catch (IOException e) { e.printStackTrace(); } commandBox.runCommand("save src\\test\\data\\sandbox\\newFile"); assertResultMessage(SaveCommand.MESSAGE_NOT_A_DIRECTORY); } @Test public void save_validDirectory_success() { File newFolder = new File("src\\test\\data\\sandbox\\newFolder"); newFolder.mkdir(); commandBox.runCommand("save src\\test\\data\\sandbox\\newFolder"); assertResultMessage(SaveCommand.MESSAGE_SUCCESS); } }
Add null in invalid path test
src/test/java/guitests/SaveCommandTest.java
Add null in invalid path test
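The new test passes a NUL character ("\0") inside the save path. A small sketch, independent of the app, of why that is a reliably invalid path on the JDK (java.nio rejects NUL on the common platforms; exact messages vary):

import java.nio.file.InvalidPathException;
import java.nio.file.Paths;

public class NulPathCheck {
    public static void main(String[] args) {
        try {
            Paths.get("\0"); // NUL is not a legal character in file paths
            System.out.println("accepted (unexpected)");
        } catch (InvalidPathException e) {
            System.out.println("rejected: " + e.getReason());
        }
    }
}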
Java
mit
a67891d348079aaaf11ae924528c1d8f93a96323
0
armandgray/taapProject,armandgray/taapProject,armandgray/taapProject
package com.armandgray.taap; import android.content.Intent; import android.support.design.widget.FloatingActionButton; import android.support.v7.widget.SearchView; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.View; import android.widget.Spinner; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertNotNull; import static junit.framework.Assert.assertTrue; import static org.robolectric.Shadows.shadowOf; @RunWith(RobolectricTestRunner.class) @Config(constants = BuildConfig.class) public class MainActivityViewsTest { private MainActivity activity; private MainActivityViews views; @Before public void setUp() { System.out.println("Running Set Up!"); activity = Robolectric.buildActivity(MainActivity.class).create().visible().get(); views = activity.controller.views; } @Test public void activityInstanceOfMainActivity_TestConstructor() throws Exception { assertEquals("MainActivity", views.activity.getLocalClassName()); } @Test public void doesSetContentView_MethodTest_SetupActivityInitialState() throws Exception { assertEquals(R.id.activityMainLayout, shadowOf(activity).getContentView().getId()); } @Test public void canGetToolbar__MethodTest_SetupToolbar() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); assertNotNull(toolbar); assertNotNull(activity.getSupportActionBar()); } @Test public void canGetOptionsMenu_TestOnCreate() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); assertNotNull(shadowOf(activity).getOptionsMenu()); } @Test public void canSelectOptionsMenuItem_Search() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_search))); } @Test public void canShowSearchViewOnMenuItemClick() throws Exception { SearchView searchView = (SearchView) activity.findViewById(R.id.searchView); Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_search))); assertEquals(View.VISIBLE, searchView.getVisibility()); } @Test public void canHideSpinnerAndFabOnMenuItemClick() throws Exception { Spinner spinner = (Spinner) activity.findViewById(R.id.spDrillsSort); FloatingActionButton fab = (FloatingActionButton) activity.findViewById(R.id.fab); Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_search))); assertEquals(View.GONE, spinner.getVisibility()); assertEquals(View.GONE, fab.getVisibility()); } @Test public void canSelectOptionsMenuItem_Log() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); 
assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_log))); } @Test public void canStartActivityOnLogMenuItemClick() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_log))); Intent expectedIntent = new Intent(); assertEquals(expectedIntent.toString(), shadowOf(activity).getNextStartedActivity().toString()); } @Test public void hasViewFab_MethodTest_SetupFabClickListener() throws Exception { FloatingActionButton fab = (FloatingActionButton) activity.findViewById(R.id.fab); assertNotNull(fab); } @Test public void testSpinnerHasEntries_MethodTest_SetupSortAndSearch() throws Exception { Spinner spinner = (Spinner) activity.findViewById(R.id.spDrillsSort); assertNotNull(spinner); assertTrue(spinner.getCount() > 0); } @Test public void doesHideSearchView_MethodTest_SetupSortAndSearch() throws Exception { SearchView searchView = (SearchView) activity.findViewById(R.id.searchView); assertEquals(View.GONE, searchView.getVisibility()); } @After public void tearDown() { System.out.println("Running TearDown!"); activity = null; views = null; } }
TAAP/app/src/test/java/com/armandgray/taap/MainActivityViewsTest.java
package com.armandgray.taap; import android.content.Intent; import android.support.design.widget.FloatingActionButton; import android.support.v7.widget.SearchView; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.View; import android.widget.Spinner; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertNotNull; import static junit.framework.Assert.assertTrue; import static org.robolectric.Shadows.shadowOf; @RunWith(RobolectricTestRunner.class) @Config(constants = BuildConfig.class) public class MainActivityViewsTest { private MainActivity activity; private MainActivityViews views; @Before public void setUp() { System.out.println("Running Set Up!"); activity = Robolectric.buildActivity(MainActivity.class).create().visible().get(); views = activity.controller.views; } @Test public void activityInstanceOfMainActivity_TestConstructor() throws Exception { assertEquals("MainActivity", views.activity.getLocalClassName()); } @Test public void doesSetContentView_MethodTest_SetupActivityInitialState() throws Exception { assertEquals(R.id.activityMainLayout, shadowOf(activity).getContentView().getId()); } @Test public void canGetToolbar__MethodTest_SetupToolbar() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); assertNotNull(toolbar); assertNotNull(activity.getSupportActionBar()); } @Test public void canGetOptionsMenu_TestOnCreate() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); assertNotNull(shadowOf(activity).getOptionsMenu()); } @Test public void canSelectOptionsMenuItem_Search() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_search))); } @Test public void canShowSearchViewOnMenuItemClick() throws Exception { SearchView searchView = (SearchView) activity.findViewById(R.id.searchView); Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_search))); assertEquals(View.VISIBLE, searchView.getVisibility()); } @Test public void canHideSpinnerAndFabOnMenuItemClick() throws Exception { Spinner spinner = (Spinner) activity.findViewById(R.id.spDrillsSort); FloatingActionButton fab = (FloatingActionButton) activity.findViewById(R.id.fab); Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_search))); assertEquals(View.GONE, spinner.getVisibility()); assertEquals(View.GONE, fab.getVisibility()); } @Test public void canSelectOptionsMenuItem_Log() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); 
assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_log))); } @Test public void canStartActivityOnLogMenuItemClick() throws Exception { Toolbar toolbar = (Toolbar) activity.findViewById(R.id.toolbar); shadowOf(activity).onCreateOptionsMenu(toolbar.getMenu()); Menu optionsMenu = shadowOf(activity).getOptionsMenu(); assertTrue(activity.onOptionsItemSelected(optionsMenu.findItem(R.id.action_log))); Intent expectedIntent = new Intent(); assertEquals(expectedIntent.toString(), shadowOf(activity).getNextStartedActivity().toString()); } @Test public void hasViewFab_MethodTest_SetupFabClickListener() throws Exception { FloatingActionButton fab = (FloatingActionButton) activity.findViewById(R.id.fab); assertNotNull(fab); } @Test public void testSpinnerHasEntries_MethodTest_SetupSortAndSearch() throws Exception { Spinner spinner = (Spinner) activity.findViewById(R.id.spDrillsSort); assertNotNull(spinner); assertTrue(spinner.getCount() > 0); } @Test public void doesHideSearchView_TestOnCreate() throws Exception { SearchView searchView = (SearchView) activity.findViewById(R.id.searchView); assertEquals(View.GONE, searchView.getVisibility()); } @After public void tearDown() { System.out.println("Running TearDown!"); activity = null; views = null; } }
renamed ViewTest based on View class methods
TAAP/app/src/test/java/com/armandgray/taap/MainActivityViewsTest.java
renamed ViewTest based on View class methods
Java
mit
7570b1c4b327978e5ad0b9e3b344a1d67289e04f
0
fiveham/Sudoku_Solver
package sudoku; import java.util.Collection; public abstract class Fact extends NodeSet<Claim,Fact>{ /** * */ private static final long serialVersionUID = -3547362110024521237L; /** * <p>The number ({@value}) of elements (neighbors) of a Rule when * the Claim satisfying the Rule has been completely * identified.</p> */ public static final int SIZE_WHEN_SOLVED = 1; /** * <p>The number ({@value}) of elements (neighbors) of a Rule that * is a {@code xor}. A Rule is a {@code xor} if it has two Claims, * because exactly one of them is true and the other is false. Given * that the Rule is satisfied (the Rule is {@code true}), the Claims, * as inputs to the Rule, make such a Rule a {@code xor} operation on * its neighbors.</p> */ public static final int SIZE_WHEN_XOR = 2; protected Fact(Puzzle puzzle, Collection<Claim> c, int hash) { super(puzzle, hash); addAll(c); } @Override public boolean equals(Object o){ if(o instanceof Fact){ Fact r = (Fact) o; return r.puzzle.equals(puzzle) && super.equals(r); } return false; } public boolean insideVisible(){ return visible().stream().anyMatch((v) -> v.containsAll(this)); } public boolean isSolved(){ return size() == SIZE_WHEN_SOLVED; } public boolean isXor(){ return size() == SIZE_WHEN_XOR; } }
src/sudoku/Fact.java
package sudoku; import java.util.Collection; public abstract class Fact extends NodeSet<Claim,Fact>{ /** * */ private static final long serialVersionUID = -3547362110024521237L; /** * <p>The number ({@value}) of elements (neighbors) of a Rule when * the Claim satisfying the Rule has been completely * identified.</p> */ public static final int SIZE_WHEN_SOLVED = 1; /** * <p>The number ({@value}) of elements (neighbors) of a Rule that * is a {@code xor}. A Rule is a {@code xor} if it has two Claims, * because exactly one of them is true and the other is false. Given * that the Rule is satisfied (the Rule is {@code true}), the Claims, * as inputs to the Rule, make such a Rule a {@code xor} operation on * its neighbors.</p> */ public static final int SIZE_WHEN_XOR = 2; protected Fact(Puzzle puzzle, Collection<Claim> c, int hash) { super(puzzle, hash); addAll(c); } @Override public boolean equals(Object o){ if(o instanceof Fact){ Fact r = (Fact) o; return r.puzzle.equals(puzzle) && super.equals(r); } return false; } public boolean isSolved(){ return size() == SIZE_WHEN_SOLVED; } public boolean isXor(){ return size() == SIZE_WHEN_XOR; } }
rename insideNeighbor
src/sudoku/Fact.java
rename insideNeighbor
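A toy model (names hypothetical, outside the sudoku package) of the size-based predicates in Fact: a fact over candidate claims is solved when one claim remains, and behaves as a xor when exactly two remain, since precisely one of the two can be true.

import java.util.List;

class ToyFact {
    static final int SIZE_WHEN_SOLVED = 1;
    static final int SIZE_WHEN_XOR = 2;

    private final List<String> claims; // remaining candidate claims

    ToyFact(List<String> claims) {
        this.claims = claims;
    }

    boolean isSolved() { return claims.size() == SIZE_WHEN_SOLVED; }
    boolean isXor()    { return claims.size() == SIZE_WHEN_XOR; }

    public static void main(String[] args) {
        System.out.println(new ToyFact(List.of("r1c1=5")).isSolved());        // true
        System.out.println(new ToyFact(List.of("r1c1=5", "r1c2=5")).isXor()); // true
    }
}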
Java
lgpl-2.1
96f3d8e58e20a14c94f7edafdc465fcf0d7c0cc7
0
threerings/narya,threerings/narya,threerings/narya,threerings/narya,threerings/narya
// // $Id$ // // Narya library - tools for developing networked games // Copyright (C) 2002-2004 Three Rings Design, Inc., All Rights Reserved // http://www.threerings.net/code/narya/ // // This library is free software; you can redistribute it and/or modify it // under the terms of the GNU Lesser General Public License as published // by the Free Software Foundation; either version 2.1 of the License, or // (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.threerings.whirled.spot.client; import com.samskivert.util.ResultListener; import com.samskivert.util.StringUtil; import com.threerings.presents.client.BasicDirector; import com.threerings.presents.client.Client; import com.threerings.presents.client.InvocationService.ConfirmListener; import com.threerings.presents.data.ClientObject; import com.threerings.presents.dobj.AttributeChangeListener; import com.threerings.presents.dobj.AttributeChangedEvent; import com.threerings.presents.dobj.DObject; import com.threerings.presents.dobj.DObjectManager; import com.threerings.presents.dobj.ObjectAccessException; import com.threerings.presents.dobj.Subscriber; import com.threerings.crowd.chat.client.ChatDirector; import com.threerings.crowd.chat.data.ChatCodes; import com.threerings.crowd.client.LocationAdapter; import com.threerings.crowd.client.LocationDirector; import com.threerings.crowd.data.PlaceObject; import com.threerings.whirled.client.SceneDirector; import com.threerings.whirled.data.SceneModel; import com.threerings.whirled.data.ScenedBodyObject; import com.threerings.whirled.util.WhirledContext; import com.threerings.whirled.spot.Log; import com.threerings.whirled.spot.data.ClusteredBodyObject; import com.threerings.whirled.spot.data.Location; import com.threerings.whirled.spot.data.Portal; import com.threerings.whirled.spot.data.SpotCodes; import com.threerings.whirled.spot.data.SpotScene; /** * Extends the standard scene director with facilities to move between * locations within a scene. */ public class SpotSceneDirector extends BasicDirector implements SpotCodes, Subscriber, AttributeChangeListener { /** * Creates a new spot scene director with the specified context and * which will cooperate with the supplied scene director. * * @param ctx the active client context. * @param locdir the location director with which we will be * cooperating. * @param scdir the scene director with which we will be cooperating. */ public SpotSceneDirector (WhirledContext ctx, LocationDirector locdir, SceneDirector scdir) { super(ctx); _ctx = ctx; _scdir = scdir; // wire ourselves up to hear about leave place notifications locdir.addLocationObserver(new LocationAdapter() { public void locationDidChange (PlaceObject place) { // we need to clear some things out when we leave a place handleDeparture(); } }); } /** * Configures this spot scene director with a chat director, with * which it will coordinate to implement cluster chatting. 
*/ public void setChatDirector (ChatDirector chatdir) { _chatdir = chatdir; } /** * Returns our current location unless we have a location change * pending, in which case our pending location is returned. */ public Location getIntendedLocation () { return (_pendingLoc != null) ? _pendingLoc : _location; } /** * Requests that this client move to the location specified by the * supplied portal id. A request will be made and when the response is * received, the location observers will be notified of success or * failure. * * @return true if the request was issued, false if it was rejected by * a location observer or because we have another request outstanding. */ public boolean traversePortal (int portalId) { return traversePortal(portalId, null); } /** * Requests that this client move to the location specified by the * supplied portal id. A request will be made and when the response is * received, the location observers will be notified of success or * failure. */ public boolean traversePortal (int portalId, ResultListener rl) { // look up the destination scene and location SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to traverse portal when we have " + "no scene [portalId=" + portalId + "]."); return false; } // sanity check the server's notion of what scene we're in with // our notion of it int sceneId = _scdir.getScene().getId(); ScenedBodyObject sbobj = (ScenedBodyObject) _ctx.getClient().getClientObject(); if (sceneId != sbobj.getSceneId()) { Log.warning("Client and server differ in opinion of what scene " + "we're in [sSceneId=" + sbobj.getSceneId() + ", cSceneId=" + sceneId + "]."); return false; } // find the portal they're talking about Portal dest = scene.getPortal(portalId); if (dest == null) { Log.warning("Requested to traverse non-existent portal " + "[portalId=" + portalId + ", portals=" + StringUtil.toString(scene.getPortals()) + "]."); return false; } // prepare to move to this scene (sets up pending data) if (!_scdir.prepareMoveTo(dest.targetSceneId, rl)) { Log.info("Portal traversal vetoed by scene director " + "[portalId=" + portalId + "]."); return false; } // check the version of our cached copy of the scene to which // we're requesting to move; if we were unable to load it, assume // a cached version of zero int sceneVer = 0; SceneModel pendingModel = _scdir.getPendingModel(); if (pendingModel != null) { sceneVer = pendingModel.version; } // issue a traversePortal request Log.info("Issuing traversePortal(" + sceneId + ", " + dest + ", " + sceneVer + ")."); _sservice.traversePortal( _ctx.getClient(), sceneId, portalId, sceneVer, _scdir); return true; } /** * Issues a request to change our location within the scene to the * specified location. * * @param loc the new location to which to move. * @param listener will be notified of success or failure. Most client * entities find out about location changes via changes to the * occupant info data, but the initiator of a location change request * can be notified of its success or failure, primarily so that it can * act in anticipation of a successful location change (like by * starting a sprite moving toward the new location), but backtrack if * it finds out that the location change failed. 
*/ public void changeLocation (Location loc, final ResultListener listener) { // refuse if there's a pending location change or if we're already // at the specified location if (loc.equivalent(_location)) { Log.info("Not going to " + loc + "; we're at " + _location + " and we're headed to " + _pendingLoc + "."); if (listener != null) { // This isn't really a failure, it's just a no-op. listener.requestCompleted(_location); } return; } if (_pendingLoc != null) { Log.info("Not going to " + loc + "; we're at " + _location + " and we're headed to " + _pendingLoc + "."); if (listener != null) { // Already moving, best thing to do is ignore it. listener.requestCompleted(_pendingLoc); } return; } SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to change locations, but we're not " + "currently in any scene [loc=" + loc + "]."); if (listener != null) { listener.requestFailed(new Exception("m.cant_get_there")); } return; } int sceneId = _scdir.getScene().getId(); Log.info("Sending changeLocation request [scid=" + sceneId + ", loc=" + loc + "]."); _pendingLoc = (Location)loc.clone(); ConfirmListener clist = new ConfirmListener() { public void requestProcessed () { _location = _pendingLoc; _pendingLoc = null; if (listener != null) { listener.requestCompleted(_location); } } public void requestFailed (String reason) { _pendingLoc = null; if (listener != null) { listener.requestFailed(new Exception(reason)); } } }; _sservice.changeLocation(_ctx.getClient(), sceneId, loc, clist); } /** * Issues a request to join the cluster associated with the specified * user (starting one if necessary). * * @param froid the bodyOid of another user; the calling user will * be made to join the target user's cluster. * @param listener will be notified of success or failure. */ public void joinCluster (int froid, final ResultListener listener) { SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to join cluster, but we're not " + "currently in any scene [froid=" + froid + "]."); if (listener != null) { listener.requestFailed(new Exception("m.cant_get_there")); } return; } Log.info("Joining cluster [friend=" + froid + "]."); _sservice.joinCluster(_ctx.getClient(), froid, new ConfirmListener() { public void requestProcessed () { if (listener != null) { listener.requestCompleted(null); } } public void requestFailed (String reason) { if (listener != null) { listener.requestFailed(new Exception(reason)); } } }); } /** * Sends a chat message to the other users in the cluster to which the * location that we currently occupy belongs. * * @return true if a cluster speak message was delivered, false if we * are not in a valid cluster and refused to deliver the request. */ public boolean requestClusterSpeak (String message) { return requestClusterSpeak(message, ChatCodes.DEFAULT_MODE); } /** * Sends a chat message to the other users in the cluster to which the * location that we currently occupy belongs. * * @return true if a cluster speak message was delivered, false if we * are not in a valid cluster and refused to deliver the request. 
*/ public boolean requestClusterSpeak (String message, byte mode) { // make sure we're currently in a scene SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to speak to cluster, but we're not " + "currently in any scene [message=" + message + "]."); return false; } // make sure we're part of a cluster if (_self.getClusterOid() <= 0) { Log.info("Ignoring cluster speak as we're not in a cluster " + "[cloid=" + _self.getClusterOid() + "]."); return false; } message = _chatdir.filter(message, null, true); if (message != null) { _sservice.clusterSpeak(_ctx.getClient(), message, mode); } return true; } // documentation inherited from interface public void objectAvailable (DObject object) { clearCluster(false); int oid = object.getOid(); if (oid != _self.getClusterOid()) { // we got it too late, just unsubscribe DObjectManager omgr = _ctx.getDObjectManager(); omgr.unsubscribeFromObject(oid, this); } else { // it's our new cluster! _clobj = object; if (_chatdir != null) { _chatdir.addAuxiliarySource(object, CLUSTER_CHAT_TYPE); } } } // documentation inherited from interface public void requestFailed (int oid, ObjectAccessException cause) { Log.warning("Unable to subscribe to cluster chat object " + "[oid=" + oid + ", cause=" + cause + "]."); } // documentation inherited from interface public void attributeChanged (AttributeChangedEvent event) { if (event.getName().equals(_self.getClusterField()) && !event.getValue().equals(event.getOldValue())) { maybeUpdateCluster(); } } // documentation inherited public void clientDidLogon (Client client) { super.clientDidLogon(client); ClientObject clientObj = client.getClientObject(); if (clientObj instanceof ClusteredBodyObject) { // listen to the client object clientObj.addListener(this); _self = (ClusteredBodyObject) clientObj; // we may need to subscribe to a cluster due to session resumption maybeUpdateCluster(); } } // documentation inherited public void clientObjectDidChange (Client client) { super.clientObjectDidChange(client); // listen to the client object ClientObject clientObj = client.getClientObject(); clientObj.addListener(this); _self = (ClusteredBodyObject) clientObj; } // documentation inherited public void clientDidLogoff (Client client) { super.clientDidLogoff(client); // clear out our business _location = null; _pendingLoc = null; _sservice = null; clearCluster(true); // stop listening to the client object client.getClientObject().removeListener(this); _self = null; } // documentation inherited protected void fetchServices (Client client) { _sservice = (SpotService)client.requireService(SpotService.class); } /** * Clean up after a few things when we depart from a scene. */ protected void handleDeparture () { // clear out our last known location id _location = null; } /** * Checks to see if our cluster has changed and does the necessary * subscription machinations if necessary. */ protected void maybeUpdateCluster () { int cloid = _self.getClusterOid(); if ((_clobj == null && cloid <= 0) || (_clobj != null && cloid == _clobj.getOid())) { // our cluster didn't change, we can stop now return; } // clear out any old cluster object clearCluster(false); // if there's a new cluster object, subscribe to it if (_chatdir != null && cloid > 0) { DObjectManager omgr = _ctx.getDObjectManager(); // we'll wire up to the chat director when this completes omgr.subscribeToObject(cloid, this); } } /** * Convenience routine to unwire chat for and unsubscribe from our * current cluster, if any. 
* * @param force clear the cluster even if we're still apparently in it. */ protected void clearCluster (boolean force) { if (_clobj != null && (force || (_clobj.getOid() != _self.getClusterOid()))) { if (_chatdir != null) { _chatdir.removeAuxiliarySource(_clobj); } DObjectManager omgr = _ctx.getDObjectManager(); omgr.unsubscribeFromObject(_clobj.getOid(), this); _clobj = null; } } /** The active client context. */ protected WhirledContext _ctx; /** Access to spot scene services. */ protected SpotService _sservice; /** The scene director with which we are cooperating. */ protected SceneDirector _scdir; /** A casted reference to our clustered body object. */ protected ClusteredBodyObject _self; /** A reference to the chat director with which we coordinate. */ protected ChatDirector _chatdir; /** The location we currently occupy. */ protected Location _location; /** The location to which we have an outstanding change location * request. */ protected Location _pendingLoc; /** The cluster chat object for the cluster we currently occupy. */ protected DObject _clobj; }
src/java/com/threerings/whirled/spot/client/SpotSceneDirector.java
// // $Id$ // // Narya library - tools for developing networked games // Copyright (C) 2002-2004 Three Rings Design, Inc., All Rights Reserved // http://www.threerings.net/code/narya/ // // This library is free software; you can redistribute it and/or modify it // under the terms of the GNU Lesser General Public License as published // by the Free Software Foundation; either version 2.1 of the License, or // (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.threerings.whirled.spot.client; import com.samskivert.util.ResultListener; import com.samskivert.util.StringUtil; import com.threerings.presents.client.BasicDirector; import com.threerings.presents.client.Client; import com.threerings.presents.client.InvocationService.ConfirmListener; import com.threerings.presents.data.ClientObject; import com.threerings.presents.server.InvocationException; import com.threerings.presents.dobj.AttributeChangeListener; import com.threerings.presents.dobj.AttributeChangedEvent; import com.threerings.presents.dobj.DObject; import com.threerings.presents.dobj.DObjectManager; import com.threerings.presents.dobj.ObjectAccessException; import com.threerings.presents.dobj.Subscriber; import com.threerings.crowd.chat.client.ChatDirector; import com.threerings.crowd.chat.data.ChatCodes; import com.threerings.crowd.client.LocationAdapter; import com.threerings.crowd.client.LocationDirector; import com.threerings.crowd.data.PlaceObject; import com.threerings.whirled.client.SceneDirector; import com.threerings.whirled.data.SceneModel; import com.threerings.whirled.data.ScenedBodyObject; import com.threerings.whirled.util.WhirledContext; import com.threerings.whirled.spot.Log; import com.threerings.whirled.spot.data.ClusteredBodyObject; import com.threerings.whirled.spot.data.Location; import com.threerings.whirled.spot.data.Portal; import com.threerings.whirled.spot.data.SpotCodes; import com.threerings.whirled.spot.data.SpotScene; /** * Extends the standard scene director with facilities to move between * locations within a scene. */ public class SpotSceneDirector extends BasicDirector implements SpotCodes, Subscriber, AttributeChangeListener { /** * Creates a new spot scene director with the specified context and * which will cooperate with the supplied scene director. * * @param ctx the active client context. * @param locdir the location director with which we will be * cooperating. * @param scdir the scene director with which we will be cooperating. */ public SpotSceneDirector (WhirledContext ctx, LocationDirector locdir, SceneDirector scdir) { super(ctx); _ctx = ctx; _scdir = scdir; // wire ourselves up to hear about leave place notifications locdir.addLocationObserver(new LocationAdapter() { public void locationDidChange (PlaceObject place) { // we need to clear some things out when we leave a place handleDeparture(); } }); } /** * Configures this spot scene director with a chat director, with * which it will coordinate to implement cluster chatting. 
*/ public void setChatDirector (ChatDirector chatdir) { _chatdir = chatdir; } /** * Returns our current location unless we have a location change * pending, in which case our pending location is returned. */ public Location getIntendedLocation () { return (_pendingLoc != null) ? _pendingLoc : _location; } /** * Requests that this client move to the location specified by the * supplied portal id. A request will be made and when the response is * received, the location observers will be notified of success or * failure. * * @return true if the request was issued, false if it was rejected by * a location observer or because we have another request outstanding. */ public boolean traversePortal (int portalId) { return traversePortal(portalId, null); } /** * Requests that this client move to the location specified by the * supplied portal id. A request will be made and when the response is * received, the location observers will be notified of success or * failure. */ public boolean traversePortal (int portalId, ResultListener rl) { // look up the destination scene and location SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to traverse portal when we have " + "no scene [portalId=" + portalId + "]."); return false; } // sanity check the server's notion of what scene we're in with // our notion of it int sceneId = _scdir.getScene().getId(); ScenedBodyObject sbobj = (ScenedBodyObject) _ctx.getClient().getClientObject(); if (sceneId != sbobj.getSceneId()) { Log.warning("Client and server differ in opinion of what scene " + "we're in [sSceneId=" + sbobj.getSceneId() + ", cSceneId=" + sceneId + "]."); return false; } // find the portal they're talking about Portal dest = scene.getPortal(portalId); if (dest == null) { Log.warning("Requested to traverse non-existent portal " + "[portalId=" + portalId + ", portals=" + StringUtil.toString(scene.getPortals()) + "]."); return false; } // prepare to move to this scene (sets up pending data) if (!_scdir.prepareMoveTo(dest.targetSceneId, rl)) { Log.info("Portal traversal vetoed by scene director " + "[portalId=" + portalId + "]."); return false; } // check the version of our cached copy of the scene to which // we're requesting to move; if we were unable to load it, assume // a cached version of zero int sceneVer = 0; SceneModel pendingModel = _scdir.getPendingModel(); if (pendingModel != null) { sceneVer = pendingModel.version; } // issue a traversePortal request Log.info("Issuing traversePortal(" + sceneId + ", " + dest + ", " + sceneVer + ")."); _sservice.traversePortal( _ctx.getClient(), sceneId, portalId, sceneVer, _scdir); return true; } /** * Issues a request to change our location within the scene to the * specified location. * * @param loc the new location to which to move. * @param listener will be notified of success or failure. Most client * entities find out about location changes via changes to the * occupant info data, but the initiator of a location change request * can be notified of its success or failure, primarily so that it can * act in anticipation of a successful location change (like by * starting a sprite moving toward the new location), but backtrack if * it finds out that the location change failed. 
*/ public void changeLocation (Location loc, final ResultListener listener) { // refuse if there's a pending location change or if we're already // at the specified location if (loc.equivalent(_location)) { Log.info("Not going to " + loc + "; we're at " + _location + " and we're headed to " + _pendingLoc + "."); if (listener != null) { // This isn't really a failure, it's just a no-op. listener.requestCompleted(_location); } return; } if (_pendingLoc != null) { Log.info("Not going to " + loc + "; we're at " + _location + " and we're headed to " + _pendingLoc + "."); if (listener != null) { // Already moving, best thing to do is ignore it. listener.requestCompleted(_pendingLoc); } return; } SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to change locations, but we're not " + "currently in any scene [loc=" + loc + "]."); if (listener != null) { listener.requestFailed( new InvocationException("m.cant_get_there")); } return; } int sceneId = _scdir.getScene().getId(); Log.info("Sending changeLocation request [scid=" + sceneId + ", loc=" + loc + "]."); _pendingLoc = (Location)loc.clone(); ConfirmListener clist = new ConfirmListener() { public void requestProcessed () { _location = _pendingLoc; _pendingLoc = null; if (listener != null) { listener.requestCompleted(_location); } } public void requestFailed (String reason) { _pendingLoc = null; if (listener != null) { listener.requestFailed(new InvocationException(reason)); } } }; _sservice.changeLocation(_ctx.getClient(), sceneId, loc, clist); } /** * Issues a request to join the cluster associated with the specified * user (starting one if necessary). * * @param froid the bodyOid of another user; the calling user will * be made to join the target user's cluster. * @param listener will be notified of success or failure. */ public void joinCluster (int froid, final ResultListener listener) { SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to join cluster, but we're not " + "currently in any scene [froid=" + froid + "]."); if (listener != null) { listener.requestFailed(new Exception("m.cant_get_there")); } return; } Log.info("Joining cluster [friend=" + froid + "]."); _sservice.joinCluster(_ctx.getClient(), froid, new ConfirmListener() { public void requestProcessed () { if (listener != null) { listener.requestCompleted(null); } } public void requestFailed (String reason) { if (listener != null) { listener.requestFailed(new Exception(reason)); } } }); } /** * Sends a chat message to the other users in the cluster to which the * location that we currently occupy belongs. * * @return true if a cluster speak message was delivered, false if we * are not in a valid cluster and refused to deliver the request. */ public boolean requestClusterSpeak (String message) { return requestClusterSpeak(message, ChatCodes.DEFAULT_MODE); } /** * Sends a chat message to the other users in the cluster to which the * location that we currently occupy belongs. * * @return true if a cluster speak message was delivered, false if we * are not in a valid cluster and refused to deliver the request. 
*/ public boolean requestClusterSpeak (String message, byte mode) { // make sure we're currently in a scene SpotScene scene = (SpotScene)_scdir.getScene(); if (scene == null) { Log.warning("Requested to speak to cluster, but we're not " + "currently in any scene [message=" + message + "]."); return false; } // make sure we're part of a cluster if (_self.getClusterOid() <= 0) { Log.info("Ignoring cluster speak as we're not in a cluster " + "[cloid=" + _self.getClusterOid() + "]."); return false; } message = _chatdir.filter(message, null, true); if (message != null) { _sservice.clusterSpeak(_ctx.getClient(), message, mode); } return true; } // documentation inherited from interface public void objectAvailable (DObject object) { clearCluster(false); int oid = object.getOid(); if (oid != _self.getClusterOid()) { // we got it too late, just unsubscribe DObjectManager omgr = _ctx.getDObjectManager(); omgr.unsubscribeFromObject(oid, this); } else { // it's our new cluster! _clobj = object; if (_chatdir != null) { _chatdir.addAuxiliarySource(object, CLUSTER_CHAT_TYPE); } } } // documentation inherited from interface public void requestFailed (int oid, ObjectAccessException cause) { Log.warning("Unable to subscribe to cluster chat object " + "[oid=" + oid + ", cause=" + cause + "]."); } // documentation inherited from interface public void attributeChanged (AttributeChangedEvent event) { if (event.getName().equals(_self.getClusterField()) && !event.getValue().equals(event.getOldValue())) { maybeUpdateCluster(); } } // documentation inherited public void clientDidLogon (Client client) { super.clientDidLogon(client); ClientObject clientObj = client.getClientObject(); if (clientObj instanceof ClusteredBodyObject) { // listen to the client object clientObj.addListener(this); _self = (ClusteredBodyObject) clientObj; // we may need to subscribe to a cluster due to session resumption maybeUpdateCluster(); } } // documentation inherited public void clientObjectDidChange (Client client) { super.clientObjectDidChange(client); // listen to the client object ClientObject clientObj = client.getClientObject(); clientObj.addListener(this); _self = (ClusteredBodyObject) clientObj; } // documentation inherited public void clientDidLogoff (Client client) { super.clientDidLogoff(client); // clear out our business _location = null; _pendingLoc = null; _sservice = null; clearCluster(true); // stop listening to the client object client.getClientObject().removeListener(this); _self = null; } // documentation inherited protected void fetchServices (Client client) { _sservice = (SpotService)client.requireService(SpotService.class); } /** * Clean up after a few things when we depart from a scene. */ protected void handleDeparture () { // clear out our last known location id _location = null; } /** * Checks to see if our cluster has changed and does the necessary * subscription machinations if necessary. */ protected void maybeUpdateCluster () { int cloid = _self.getClusterOid(); if ((_clobj == null && cloid <= 0) || (_clobj != null && cloid == _clobj.getOid())) { // our cluster didn't change, we can stop now return; } // clear out any old cluster object clearCluster(false); // if there's a new cluster object, subscribe to it if (_chatdir != null && cloid > 0) { DObjectManager omgr = _ctx.getDObjectManager(); // we'll wire up to the chat director when this completes omgr.subscribeToObject(cloid, this); } } /** * Convenience routine to unwire chat for and unsubscribe from our * current cluster, if any. 
* * @param force clear the cluster even if we're still apparently in it. */ protected void clearCluster (boolean force) { if (_clobj != null && (force || (_clobj.getOid() != _self.getClusterOid()))) { if (_chatdir != null) { _chatdir.removeAuxiliarySource(_clobj); } DObjectManager omgr = _ctx.getDObjectManager(); omgr.unsubscribeFromObject(_clobj.getOid(), this); _clobj = null; } } /** The active client context. */ protected WhirledContext _ctx; /** Access to spot scene services. */ protected SpotService _sservice; /** The scene director with which we are cooperating. */ protected SceneDirector _scdir; /** A casted reference to our clustered body object. */ protected ClusteredBodyObject _self; /** A reference to the chat director with which we coordinate. */ protected ChatDirector _chatdir; /** The location we currently occupy. */ protected Location _location; /** The location to which we have an outstanding change location * request. */ protected Location _pendingLoc; /** The cluster chat object for the cluster we currently occupy. */ protected DObject _clobj; }
Avoid pointless usage of a server class. The first clue should have been that it was a server class... ;) git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@4142 542714f4-19e9-0310-aa3c-eee0fc999fb1
src/java/com/threerings/whirled/spot/client/SpotSceneDirector.java
Avoid pointless usage of a server class. The first clue should have been that it was a server class... ;)
Java
apache-2.0
63ab46d318d06b8ee460d407723150326ca0610b
0
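The changeLocation() logic in the record above is a small instance of the optimistic pending-request pattern: the client records the requested location in _pendingLoc, reports the outcome through a ResultListener, and only promotes the pending value to _location once the server confirms. Below is a minimal, self-contained sketch of that pattern; the names (PendingMover, Location, sendRequest) are hypothetical and are not part of the whirled/Narya API.

import java.util.function.Consumer;

/**
 * Sketch of the "pending move" pattern: record the requested location,
 * let callers act on it optimistically, then commit on confirmation or
 * roll back on failure. Illustrative only; not the whirled API.
 */
public class PendingMover {
    /** Trivial stand-in for a scene location. */
    public record Location(int x, int y) {}

    private Location current = new Location(0, 0);
    private Location pending; // non-null only while a request is in flight

    /** Returns where we are headed, falling back to where we are. */
    public Location intendedLocation() {
        return (pending != null) ? pending : current;
    }

    /** Issues a move unless one is already outstanding or it is a no-op. */
    public boolean move(Location loc, Consumer<String> onFailure) {
        if (loc.equals(current) || pending != null) {
            return false; // already there, or already moving
        }
        pending = loc;
        // a real client would send a network request; we fake the round trip
        sendRequest(confirmed -> {
            if (confirmed) {
                current = pending;                    // commit
            } else {
                onFailure.accept("m.cant_get_there"); // roll back
            }
            pending = null;
        });
        return true;
    }

    /** Fake server round trip; always confirms in this sketch. */
    private void sendRequest(Consumer<Boolean> callback) {
        callback.accept(Boolean.TRUE);
    }

    public static void main(String[] args) {
        PendingMover mover = new PendingMover();
        mover.move(new Location(3, 4), reason -> System.err.println("failed: " + reason));
        System.out.println("now at " + mover.intendedLocation());
    }
}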
jnidzwetzki/bboxdb,jnidzwetzki/bboxdb,jnidzwetzki/bboxdb,jnidzwetzki/scalephant,jnidzwetzki/scalephant
/******************************************************************************* * * Copyright (C) 2015-2017 the BBoxDB project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *******************************************************************************/ package org.bboxdb.performance.osm; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.sql.SQLException; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import org.bboxdb.performance.experiments.DetermineSamplingSize; import org.bboxdb.performance.osm.filter.OSMTagEntityFilter; import org.bboxdb.performance.osm.filter.multipoint.OSMBuildingsEntityFilter; import org.bboxdb.performance.osm.filter.multipoint.OSMRoadsEntityFilter; import org.bboxdb.performance.osm.filter.multipoint.OSMWaterEntityFilter; import org.bboxdb.performance.osm.filter.singlepoint.OSMTrafficSignalEntityFilter; import org.bboxdb.performance.osm.filter.singlepoint.OSMTreeEntityFilter; import org.bboxdb.performance.osm.util.Polygon; import org.bboxdb.performance.osm.util.SerializableNode; import org.bboxdb.performance.osm.util.SerializerHelper; import org.openstreetmap.osmosis.core.container.v0_6.EntityContainer; import org.openstreetmap.osmosis.core.domain.v0_6.Node; import org.openstreetmap.osmosis.core.domain.v0_6.Tag; import org.openstreetmap.osmosis.core.domain.v0_6.Way; import org.openstreetmap.osmosis.core.domain.v0_6.WayNode; import org.openstreetmap.osmosis.core.task.v0_6.Sink; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import crosby.binary.osmosis.OsmosisReader; public class OSMConverter implements Runnable, Sink { /** * The file to import */ protected final String filename; /** * The output dir */ protected final String output; /** * The node serializer */ protected final SerializerHelper<Polygon> serializerHelper = new SerializerHelper<>(); /** * The number of processed elements */ protected long processedElements = 0; /** * The filter */ protected final static Map<OSMType, OSMTagEntityFilter> filter = new HashMap<>(); /** * The output stream map */ protected final Map<OSMType, Writer> writerMap = new HashMap<>(); /** * The performance timestamp */ protected long lastPerformaceTimestamp = 0; /** * Conversion begin */ protected final long beginTimestamp; protected final OSMNodeStore osmNodeStore; /** * The Logger */ private final static Logger logger = LoggerFactory.getLogger(DetermineSamplingSize.class); static { filter.put(OSMType.TREE, new OSMTreeEntityFilter()); filter.put(OSMType.TRAFFIC_SIGNAL, new OSMTrafficSignalEntityFilter()); filter.put(OSMType.ROAD, new OSMRoadsEntityFilter()); filter.put(OSMType.BUILDING, new OSMBuildingsEntityFilter()); filter.put(OSMType.WATER, new OSMWaterEntityFilter()); } public OSMConverter(final String filename, final String workfolder, final String output) { this.filename = filename; this.output = output; this.beginTimestamp = System.currentTimeMillis(); final File 
workfolderDir = new File(workfolder); workfolderDir.mkdirs(); final File inputFile = new File(filename); // 5 instances per GB final int instances = (int) (inputFile.length() / (1024L * 1024L * 1024L)) * 5; System.out.println("Using DB instances: " + instances); this.osmNodeStore = new OSMNodeStore(Arrays.asList(workfolder), instances); } @Override public void run() { try { // Open file handles for(final OSMType osmType : filter.keySet()) { final BufferedWriter bw = new BufferedWriter(new FileWriter(new File(output + File.separator + osmType.toString()))); writerMap.put(osmType, bw); } System.out.format("Importing %s\n", filename); final OsmosisReader reader = new OsmosisReader(new FileInputStream(filename)); reader.setSink(this); reader.run(); System.out.format("Imported %d objects\n", processedElements); // Close file handles for(final Writer writer : writerMap.values()) { writer.close(); } writerMap.clear(); osmNodeStore.close(); } catch (IOException e) { logger.error("Got an exception during import", e); } } @Override public void initialize(Map<String, Object> metaData) { } @Override public void complete() { } @Override public void release() { } @Override public void process(final EntityContainer entityContainer) { if(processedElements % 10000 == 0) { double performanceLast = 0; double performanceTotal = processedElements / ((System.currentTimeMillis() - beginTimestamp) / 1000.0); if(lastPerformaceTimestamp != 0) { performanceLast = 10000.0 / ((System.currentTimeMillis() - lastPerformaceTimestamp) / 1000.0); } final String performanceTotalString = String.format("%.2f", performanceTotal); final String performanceLastString = String.format("%.2f", performanceLast); logger.info("Processing element {} / Elements per Sec {} / Total elements per Sec {}", processedElements, performanceLastString, performanceTotalString); lastPerformaceTimestamp = System.currentTimeMillis(); } if(entityContainer.getEntity() instanceof Node) { handleNode(entityContainer); } else if(entityContainer.getEntity() instanceof Way) { handleWay(entityContainer); } processedElements++; } /** * Handle a node * @param entityContainer */ protected void handleNode(final EntityContainer entityContainer) { try { final Node node = (Node) entityContainer.getEntity(); for(final OSMType osmType : filter.keySet()) { final OSMTagEntityFilter entityFilter = filter.get(osmType); if(entityFilter.match(node.getTags())) { final Polygon geometricalStructure = new Polygon(node.getId()); geometricalStructure.addPoint(node.getLatitude(), node.getLongitude()); for(final Tag tag : node.getTags()) { geometricalStructure.addProperty(tag.getKey(), tag.getValue()); } final Writer writer = writerMap.get(osmType); writer.write(geometricalStructure.toGeoJson()); writer.write("\n"); } } osmNodeStore.storeNode(node); } catch (SQLException | IOException e) { throw new RuntimeException(e); } } /** * Handle a way * @param entityContainer */ protected void handleWay(final EntityContainer entityContainer) { try { final Way way = (Way) entityContainer.getEntity(); for(final OSMType osmType : filter.keySet()) { final OSMTagEntityFilter entityFilter = filter.get(osmType); if(entityFilter.match(way.getTags())) { final Polygon geometricalStructure = new Polygon(way.getId()); for(final WayNode wayNode : way.getWayNodes()) { final SerializableNode node = osmNodeStore.getNodeForId(wayNode.getNodeId()); geometricalStructure.addPoint(node.getLatitude(), node.getLongitude()); } for(final Tag tag : way.getTags()) { geometricalStructure.addProperty(tag.getKey(), tag.getValue()); } 
final Writer writer = writerMap.get(osmType); writer.write(geometricalStructure.toGeoJson()); writer.write("\n"); } } } catch (SQLException | IOException e) { throw new RuntimeException(e); } } /** * ==================================================== * Main * Main * Main * Main * Main * ==================================================== */ public static void main(final String[] args) { // Check parameter if(args.length != 3) { System.err.println("Usage: program <filename> <work folder> <output dir>"); System.exit(-1); } final String filename = args[0]; final String workfolder = args[1]; final String output = args[2]; // Check file final File inputFile = new File(filename); if(! inputFile.isFile()) { System.err.println("Unable to open file: " + filename); System.exit(-1); } // Check output dir final File outputDir = new File(output); if(outputDir.exists() ) { System.err.println("Output dir already exists, please remove it first"); System.exit(-1); } if(! outputDir.mkdirs() ) { System.err.println("Unable to create " + output); System.exit(-1); } // Check work folder final File file = new File(workfolder); if(file.exists()) { System.err.println("Work folder already exists, exiting..."); System.exit(-1); } final OSMConverter converter = new OSMConverter(filename, workfolder, output); converter.run(); } }
src/main/java/org/bboxdb/performance/osm/OSMConverter.java
/******************************************************************************* * * Copyright (C) 2015-2017 the BBoxDB project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *******************************************************************************/ package org.bboxdb.performance.osm; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.sql.SQLException; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import org.bboxdb.performance.experiments.DetermineSamplingSize; import org.bboxdb.performance.osm.filter.OSMTagEntityFilter; import org.bboxdb.performance.osm.filter.multipoint.OSMBuildingsEntityFilter; import org.bboxdb.performance.osm.filter.multipoint.OSMRoadsEntityFilter; import org.bboxdb.performance.osm.filter.multipoint.OSMWaterEntityFilter; import org.bboxdb.performance.osm.filter.singlepoint.OSMTrafficSignalEntityFilter; import org.bboxdb.performance.osm.filter.singlepoint.OSMTreeEntityFilter; import org.bboxdb.performance.osm.util.Polygon; import org.bboxdb.performance.osm.util.SerializableNode; import org.bboxdb.performance.osm.util.SerializerHelper; import org.openstreetmap.osmosis.core.container.v0_6.EntityContainer; import org.openstreetmap.osmosis.core.domain.v0_6.Node; import org.openstreetmap.osmosis.core.domain.v0_6.Tag; import org.openstreetmap.osmosis.core.domain.v0_6.Way; import org.openstreetmap.osmosis.core.domain.v0_6.WayNode; import org.openstreetmap.osmosis.core.task.v0_6.Sink; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import crosby.binary.osmosis.OsmosisReader; public class OSMConverter implements Runnable, Sink { /** * The file to import */ protected final String filename; /** * The output dir */ protected final String output; /** * The node serializer */ protected final SerializerHelper<Polygon> serializerHelper = new SerializerHelper<>(); /** * The number of processed elements */ protected long processedElements = 0; /** * The filter */ protected final static Map<OSMType, OSMTagEntityFilter> filter = new HashMap<>(); /** * The output stream map */ protected final Map<OSMType, Writer> writerMap = new HashMap<>(); /** * The performance timestamp */ protected long lastPerformaceTimestamp = 0; /** * Conversion begin */ protected final long beginTimestamp; protected final OSMNodeStore osmNodeStore; /** * The Logger */ private final static Logger logger = LoggerFactory.getLogger(DetermineSamplingSize.class); static { filter.put(OSMType.TREE, new OSMTreeEntityFilter()); filter.put(OSMType.TRAFFIC_SIGNAL, new OSMTrafficSignalEntityFilter()); filter.put(OSMType.ROAD, new OSMRoadsEntityFilter()); filter.put(OSMType.BUILDING, new OSMBuildingsEntityFilter()); filter.put(OSMType.WATER, new OSMWaterEntityFilter()); } public OSMConverter(final String filename, final String workfolder, final String output) { this.filename = filename; this.output = output; this.beginTimestamp = System.currentTimeMillis(); final File 
workfoderDir = new File(workfolder); workfoderDir.mkdirs(); final File inputFile = new File(filename); // One instance per GB final int instances = (int) (inputFile.length() / (1024^3)); System.out.println("Using DB instances: " + instances); this.osmNodeStore = new OSMNodeStore(Arrays.asList(workfolder), instances); } @Override public void run() { try { // Open file handles for(final OSMType osmType : filter.keySet()) { final BufferedWriter bw = new BufferedWriter(new FileWriter(new File(output + File.separator + osmType.toString()))); writerMap.put(osmType, bw); } System.out.format("Importing %s\n", filename); final OsmosisReader reader = new OsmosisReader(new FileInputStream(filename)); reader.setSink(this); reader.run(); System.out.format("Imported %d objects\n", processedElements); // Close file handles for(final Writer writer : writerMap.values()) { writer.close(); } writerMap.clear(); osmNodeStore.close(); } catch (IOException e) { logger.error("Got an exception during import", e); } } @Override public void initialize(Map<String, Object> metaData) { } @Override public void complete() { } @Override public void release() { } @Override public void process(final EntityContainer entityContainer) { if(processedElements % 10000 == 0) { double performanceLast = 0; double performanceTotal = processedElements / ((System.currentTimeMillis() - beginTimestamp) / 1000.0); if(lastPerformaceTimestamp != 0) { performanceLast = 10000.0 / ((System.currentTimeMillis() - lastPerformaceTimestamp) / 1000.0); } final String performanceTotalString = String.format("%.2f", performanceTotal); final String performanceLastString = String.format("%.2f", performanceLast); logger.info("Processing element {} / Elements per Sec {} / Total elements per Sec {}", processedElements, performanceLastString, performanceTotalString); lastPerformaceTimestamp = System.currentTimeMillis(); } if(entityContainer.getEntity() instanceof Node) { handleNode(entityContainer); } else if(entityContainer.getEntity() instanceof Way) { handleWay(entityContainer); } processedElements++; } /** * Handle a node * @param entityContainer */ protected void handleNode(final EntityContainer entityContainer) { try { final Node node = (Node) entityContainer.getEntity(); for(final OSMType osmType : filter.keySet()) { final OSMTagEntityFilter entityFilter = filter.get(osmType); if(entityFilter.match(node.getTags())) { final Polygon geometricalStructure = new Polygon(node.getId()); geometricalStructure.addPoint(node.getLatitude(), node.getLongitude()); for(final Tag tag : node.getTags()) { geometricalStructure.addProperty(tag.getKey(), tag.getValue()); } final Writer writer = writerMap.get(osmType); writer.write(geometricalStructure.toGeoJson()); writer.write("\n"); } } osmNodeStore.storeNode(node); } catch (SQLException | IOException e) { throw new RuntimeException(e); } } /** * Handle a way * @param entityContainer */ protected void handleWay(final EntityContainer entityContainer) { try { final Way way = (Way) entityContainer.getEntity(); for(final OSMType osmType : filter.keySet()) { final OSMTagEntityFilter entityFilter = filter.get(osmType); if(entityFilter.match(way.getTags())) { final Polygon geometricalStructure = new Polygon(way.getId()); for(final WayNode wayNode : way.getWayNodes()) { final SerializableNode node = osmNodeStore.getNodeForId(wayNode.getNodeId()); geometricalStructure.addPoint(node.getLatitude(), node.getLongitude()); } for(final Tag tag : way.getTags()) { geometricalStructure.addProperty(tag.getKey(), tag.getValue()); } final 
Writer writer = writerMap.get(osmType); writer.write(geometricalStructure.toGeoJson()); writer.write("\n"); } } } catch (SQLException | IOException e) { throw new RuntimeException(e); } } /** * ==================================================== * Main * Main * Main * Main * Main * ==================================================== */ public static void main(final String[] args) { // Check parameter if(args.length != 3) { System.err.println("Usage: programm <filename> <work folder> <output dir>"); System.exit(-1); } final String filename = args[0]; final String workfolder = args[1]; final String output = args[2]; // Check file final File inputFile = new File(filename); if(! inputFile.isFile()) { System.err.println("Unable to open file: " + filename); System.exit(-1); } // Check output dir final File outputDir = new File(output); if(outputDir.exists() ) { System.err.println("Output dir already exist, please remove first"); System.exit(-1); } if(! outputDir.mkdirs() ) { System.err.println("Unable to create " + output); System.exit(-1); } // Check database file final File file = new File(workfolder); if(file.exists()) { System.err.println("Work folder already exists, exiting..."); System.exit(-1); } final OSMConverter determineSamplingSize = new OSMConverter(filename, workfolder, output); determineSamplingSize.run(); } }
Changed amount of instances
src/main/java/org/bboxdb/performance/osm/OSMConverter.java
Changed amount of instances
Java
apache-2.0
b308ff911b31bc19cf77f52234758fa4f5afeb3d
0
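The sizing expression in the OSMConverter constructor above deserves a caution: in Java, '^' is bitwise XOR, not exponentiation, and it binds more loosely than '/', so length / (1024)^3 computes (length / 1024) XOR 3 rather than a gigabyte count. The following standalone sketch contrasts the buggy and corrected forms; the class name and sample sizes are illustrative only.

/**
 * Demonstrates why "length / (1024)^3" does not compute gigabytes in Java.
 */
public class InstanceSizing {
    private static final long GB = 1024L * 1024L * 1024L;

    // Five DB instances per full gigabyte of input, as the original comment intends.
    static int instancesFor(long fileLengthBytes) {
        return (int) (fileLengthBytes / GB) * 5;
    }

    public static void main(String[] args) {
        long twoGb = 2L * GB;
        // Buggy variant from the record: '/' binds tighter than '^', so this
        // evaluates (twoGb / 1024) XOR 3 = 2097152 ^ 3 = 2097155, not a GB count.
        long buggy = twoGb / (1024) ^ 3;
        System.out.println("buggy = " + buggy);
        System.out.println("fixed = " + instancesFor(twoGb) + " instances");
    }
}

Writing the divisor as 1024L * 1024L * 1024L also keeps the arithmetic in long, which avoids int overflow for inputs larger than 2 GB.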
leonardvandriel/Scrabble
package net.sf.scrabble.core; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; /** * The allowed characters in the alphabet. */ public class Alphabet { public static final int EMPTY_VALUE = -1; public static final int JOKER_VALUE = -2; private List<Integer> codeList = new LinkedList<Integer>(); private Map<Integer, Integer> codeMap = new HashMap<Integer, Integer>(); private int jokerCode; private int emptyCode; public Alphabet(int jokerCode, int emptyCode) { this.jokerCode = jokerCode; this.emptyCode = emptyCode; } public void addCodePoint(int code) { Integer boxed = Integer.valueOf(code); if (codeList.contains(boxed)) { throw new RuntimeException("Code already in alphabet: " + code); } codeMap.put(boxed, Integer.valueOf(codeList.size())); codeList.add(boxed); } public int getSize() { return codeList.size(); } public int getValueForCode(int code) { return codeMap.get(Integer.valueOf(code)).intValue(); } public int getValueForToken(int code) { if (code == jokerCode) { return JOKER_VALUE; } if (code == emptyCode) { return EMPTY_VALUE; } return getValueForCode(code); } public int getCodeForValue(int value) { return codeList.get(value).intValue(); } public int getTokenForValue(int value) { switch (value) { case JOKER_VALUE: return jokerCode; case EMPTY_VALUE: return emptyCode; default: return getCodeForValue(value); } } public String frequencyToString(int[] frequency) { StringBuilder builder = new StringBuilder(frequency[getSize()]); for (int i = 0; i < getSize(); i++) { for (int j = 0; j < frequency[i]; j++) { builder.appendCodePoint(getCodeForValue(i)); } } return builder.toString(); } public String sequenceToString(int[] sequence) { StringBuilder builder = new StringBuilder(sequence.length); for (int i = 0; i < sequence.length; i++) { builder.appendCodePoint(getCodeForValue(sequence[i])); } return builder.toString(); } public void checkLegalTokens(String tokens) { for (int i = 0; i < tokens.length(); i++) { getValueForToken(tokens.codePointAt(i)); } } public int getFrequencyAndJoker(String tokens, int[] freqArray) { int jokers = 0; for (int i = 0; i < tokens.length(); i++) { int code = tokens.codePointAt(i); if (code == jokerCode) { jokers++; } else { freqArray[getValueForCode(code)]++; } } freqArray[getSize()] = tokens.length() - jokers; return jokers; } public String toString(Word word) { int[] valueArray = word.valueArray; StringBuilder builder = new StringBuilder(valueArray.length); for (int i = 0; i < valueArray.length; i++) { builder.appendCodePoint(getCodeForValue(valueArray[i])); } return builder.toString(); } }
src/net/sf/scrabble/core/Alphabet.java
package net.sf.scrabble.core; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; /** * The allowed characters in the alphabet. */ public class Alphabet { public static final int EMPTY_VALUE = -1; public static final int JOKER_VALUE = -2; private List<Integer> codeList = new LinkedList<Integer>(); private Map<Integer, Integer> codeMap = new HashMap<Integer, Integer>(); private int jokerCode; private int emptyCode; public Alphabet(int jokerCode, int emptyCode) { this.jokerCode = jokerCode; this.emptyCode = emptyCode; } public void addCodePoint(int code) { Integer boxed = Integer.valueOf(code); if (codeList.contains(boxed)) { throw new RuntimeException("Code already in alphabet: " + code); } codeMap.put(boxed, Integer.valueOf(codeList.size())); codeList.add(boxed); } public int getSize() { return codeList.size(); } public int getValueForCode(int code) { return codeMap.get(Integer.valueOf(code)).intValue(); } public int getValueForToken(int code) { if (code == jokerCode) { return JOKER_VALUE; } if (code == emptyCode) { return EMPTY_VALUE; } return getValueForCode(code); } public int getCodeForValue(int value) { return codeList.get(value).intValue(); } public int getTokenForValue(int value) { switch (value) { case JOKER_VALUE: return jokerCode; case EMPTY_VALUE: return emptyCode; default: return getCodeForValue(value); } } public String frequencyToString(int[] frequency) { StringBuilder builder = new StringBuilder(frequency[getSize()]); for (int i = 0; i < getSize(); i++) { for (int j = 0; j < frequency[i]; j++) { builder.appendCodePoint(getCodeForValue(i)); } } return builder.toString(); } public String sequenceToString(int[] sequence) { StringBuilder builder = new StringBuilder(sequence.length); for (int i = 0; i < sequence.length; i++) { builder.appendCodePoint(getCodeForValue(sequence[i])); } return builder.toString(); } public void checkLegalTokens(String tokens) { for (int i = 0; i < tokens.length(); i++) { getValueForCode(tokens.codePointAt(i)); } } public int getFrequencyAndJoker(String tokens, int[] freqArray) { int jokers = 0; for (int i = 0; i < tokens.length(); i++) { int code = tokens.codePointAt(i); if (code == emptyCode) { jokers++; } else { freqArray[getValueForCode(code)]++; } } freqArray[getSize()] = tokens.length() - jokers; return jokers; } public String toString(Word word) { int[] valueArray = word.valueArray; StringBuilder builder = new StringBuilder(valueArray.length); for (int i = 0; i < valueArray.length; i++) { builder.appendCodePoint(getCodeForValue(valueArray[i])); } return builder.toString(); } }
Fixed usage of jokers in rack
src/net/sf/scrabble/core/Alphabet.java
Fixed usage of jokers in rack
Java
apache-2.0
95061332ffaa4819f864fa0bbed12a052f107e61
0
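The fix in this record is subtle: the old getFrequencyAndJoker() compared rack tokens against emptyCode instead of jokerCode, so a real joker fell through to getValueForCode() and hit a null map lookup, while the old checkLegalTokens() rejected joker and empty tokens outright. A short usage sketch against the corrected class follows; the token codes ('?' for the joker, '.' for empty) and the three-letter alphabet are arbitrary, and the snippet assumes the Alphabet class above is available on the classpath.

import net.sf.scrabble.core.Alphabet;

/** Exercises the fixed joker counting in Alphabet.getFrequencyAndJoker(). */
public class JokerDemo {
    public static void main(String[] args) {
        Alphabet alphabet = new Alphabet('?', '.');
        for (int c : new int[] {'a', 'b', 'c'}) {
            alphabet.addCodePoint(c);
        }
        // one slot per letter plus a trailing "non-joker count" slot
        int[] freq = new int[alphabet.getSize() + 1];
        int jokers = alphabet.getFrequencyAndJoker("ab?a", freq);
        System.out.println("jokers = " + jokers);                         // 1
        System.out.println("'a' count = " + freq[0]);                     // 2
        System.out.println("non-joker tiles = " + freq[alphabet.getSize()]); // 3
    }
}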
ModdyLP/MoMuOSB
package storage; import main.Fast; import util.Console; /** * Created by N.Hartmann on 29.06.2017. * Copyright 2017 */ public class LanguageLoader implements Fast{ public String LANG; private String DEF_LANG = "lang_en.json"; public String ERROR = ":warning: "; private static LanguageLoader instance; /** * Get Instance * @return Class Instance */ public static LanguageLoader getInstance() { if (instance == null) { instance = new LanguageLoader(); } return instance; } /** * Create Language Files */ public void createTranslations() { if (DRIVER.getProperty(DRIVER.CONFIG, "language", "en").equals("en")) { LANG = DEF_LANG; } else { LANG = "lang_" + DRIVER.getProperty(DRIVER.CONFIG, "language", "en") + ".json"; DRIVER.createNewFile(LANG); } DRIVER.createNewFile(DEF_LANG); } /** * Get a translation string * @param option option * @return String */ public String getTranslation(String option) { return DRIVER.getProperty(LANG, option, DRIVER.getLangProperty(DEF_LANG, option).toString()).toString(); } /** * Save Default Config to File */ public void setDefaultLanguage() { Console.debug("DefaultLanguage Loading for Fallback"); //Errors DRIVER.setProperty(DEF_LANG, "common_error", "There was an error!"); DRIVER.setProperty(DEF_LANG, "commonmessage_error", "There was an error! Error: %1$s"); DRIVER.setProperty(DEF_LANG, "annotation_error", "Invalid Annotation in Module %1$s Ex: %2$s"); DRIVER.setProperty(DEF_LANG, "token_error", "Please provide a token inside of the config.yml"); DRIVER.setProperty(DEF_LANG, "execution_error", "Error occurred on Command Execution: %1$s"); DRIVER.setProperty(DEF_LANG, "notsend_error", "Message could not be sent! Error: %1$s"); DRIVER.setProperty(DEF_LANG, "notdeleted_error", "Message could not be deleted! Error: %1$s"); DRIVER.setProperty(DEF_LANG, "notsendpermission_error", "Message could not be sent! The Bot does not have enough Permissions for [%1$s] - #%2$s Error: %3$s"); DRIVER.setProperty(DEF_LANG, "private_error", "You cannot use commands in direct messages"); DRIVER.setProperty(DEF_LANG, "nopermissions_error", "You have no Permission to use this command."); DRIVER.setProperty(DEF_LANG, "nomanagepermission_error", "The Bot has no Permission to Manage Messages"); DRIVER.setProperty(DEF_LANG, "tofewarguments_error", "You have provided too few arguments. %1$s of %2$s"); DRIVER.setProperty(DEF_LANG, "tomanyarguments_error", "You have provided too many arguments. %1$s of %2$s"); DRIVER.setProperty(DEF_LANG, "botowner_error", "This command can only be used by the Bot Owner."); DRIVER.setProperty(DEF_LANG, "deletion_error", "Deletion of Messages failed (%1$s of %2$s) Error: %3$s"); //Infos DRIVER.setProperty(DEF_LANG, "login_info", "Bot signs into the server. 
Please wait until the Bot is ready..."); //Stats Command DRIVER.setProperty(DEF_LANG, "stats_title", "General Stats"); DRIVER.setProperty(DEF_LANG, "stats_servercount", "Server Count"); DRIVER.setProperty(DEF_LANG, "stats_shards", "Shards"); DRIVER.setProperty(DEF_LANG, "stats_owner", "Bot Owner"); DRIVER.setProperty(DEF_LANG, "stats_user", "Users"); DRIVER.setProperty(DEF_LANG, "stats_commands", "Commands"); DRIVER.setProperty(DEF_LANG, "stats_uptime", "Uptime"); //Help Command DRIVER.setProperty(DEF_LANG, "help_title", "All Commands"); DRIVER.setProperty(DEF_LANG, "help_command", "Commands"); DRIVER.setProperty(DEF_LANG, "help_alias", "Alias"); DRIVER.setProperty(DEF_LANG, "help_arguments", "Arguments"); DRIVER.setProperty(DEF_LANG, "help_description", "Description"); //Deletion DRIVER.setProperty(DEF_LANG, "del_topic", "Deletion %1$s of %2$s"); //Search DRIVER.setProperty(DEF_LANG, "engine_unknown", "The Search Engine is unknown!"); DRIVER.setProperty(DEF_LANG, "results_end", "You reached the end of results."); DRIVER.setProperty(DEF_LANG, "results_cleared", "The results were cleared."); } }
src/main/java/storage/LanguageLoader.java
package storage; import main.Fast; import util.Console; /** * Created by N.Hartmann on 29.06.2017. * Copyright 2017 */ public class LanguageLoader implements Fast{ public String LANG; private String DEF_LANG = "lang_en.json"; public String ERROR = ":warning: "; private static LanguageLoader instance; /** * Get Instance * @return Class Instance */ public static LanguageLoader getInstance() { if (instance == null) { instance = new LanguageLoader(); } return instance; } /** * Create Language Files */ public void createTranslations() { if (DRIVER.getProperty(DRIVER.CONFIG, "language", "en").equals("en")) { LANG = DEF_LANG; } else { LANG = "lang_"+ DRIVER.getProperty(DRIVER.CONFIG, "language", "en").equals("en")+".json"; DRIVER.createNewFile(LANG); } DRIVER.createNewFile(DEF_LANG); } /** * Get an Translation String * @param option option * @return String */ public String getTranslation(String option) { return DRIVER.getProperty(LANG, option, DRIVER.getLangProperty(DEF_LANG, option).toString()).toString(); } /** * Save Default Config to File */ public void setDefaultLanguage() { Console.debug("DefaultLanguage Loading for Fallback"); //Errors DRIVER.setProperty(DEF_LANG, "common_error", "There was an error!"); DRIVER.setProperty(DEF_LANG, "commonmessage_error", "There was an error! Error: {1}"); DRIVER.setProperty(DEF_LANG, "annotation_error", "Invalid Annotation in Module {1} Ex: {2}"); DRIVER.setProperty(DEF_LANG, "token_error", "Please provide a token inside of the config.yml"); DRIVER.setProperty(DEF_LANG, "execution_error", "Error occurred on Command Execution: {1}"); DRIVER.setProperty(DEF_LANG, "notsend_error", "Message could not be sent! Error: {1}"); DRIVER.setProperty(DEF_LANG, "notdeleted_error", "Message could not be deleted! Error: {1}"); DRIVER.setProperty(DEF_LANG, "notsendpermission_error", "Message could not be send! The Bot has not enough Permissions for [{1}] - #{2} Error: {1}"); DRIVER.setProperty(DEF_LANG, "private_error", "You can not use commands in direct messages"); DRIVER.setProperty(DEF_LANG, "nopermissions_error", "You have no Permission to use this command."); DRIVER.setProperty(DEF_LANG, "nomanagepermission_error", "The Bot has no Permission to Manage Messages"); DRIVER.setProperty(DEF_LANG, "tofewarguments_error", "You have provided to few arguments. {1} of {2}"); DRIVER.setProperty(DEF_LANG, "tomanyarguments_error", "You have provided to many arguments. {1} of {2}"); DRIVER.setProperty(DEF_LANG, "botowner_error", "This command can only be used from Bot Owner."); DRIVER.setProperty(DEF_LANG, "deletion_error", "Deletion of Messages failed ({1} of {2}) Error: {3}"); //Infos DRIVER.setProperty(DEF_LANG, "login_info", "Bot sign into the server. 
Please wait until the Bot is ready..."); //Stats Command DRIVER.setProperty(DEF_LANG, "stats_title", "General Stats"); DRIVER.setProperty(DEF_LANG, "stats_servercount", "Server Count"); DRIVER.setProperty(DEF_LANG, "stats_shards", "Shards"); DRIVER.setProperty(DEF_LANG, "stats_owner", "Bot Owner"); DRIVER.setProperty(DEF_LANG, "stats_user", "Users"); DRIVER.setProperty(DEF_LANG, "stats_commands", "Commands"); DRIVER.setProperty(DEF_LANG, "stats_uptime", "Uptime"); //Help Command DRIVER.setProperty(DEF_LANG, "help_title", "All Commands"); DRIVER.setProperty(DEF_LANG, "help_command", "Commands"); DRIVER.setProperty(DEF_LANG, "help_alias", "Alias"); DRIVER.setProperty(DEF_LANG, "help_arguments", "Arguments"); DRIVER.setProperty(DEF_LANG, "help_description", "Description"); //Deletion DRIVER.setProperty(DEF_LANG, "del_topic", "Deletion {1} of {2}"); //Search DRIVER.setProperty(DEF_LANG, "engine_unknown", "The Search Engine is unknown!"); DRIVER.setProperty(DEF_LANG, "results_end", "You reached the end of results."); DRIVER.setProperty(DEF_LANG, "results_cleared", "The results were cleared."); } }
29.06.2017 -- Replaced Placeholder with java Placeholders
src/main/java/storage/LanguageLoader.java
29.06.2017 -- Replaced Placeholder with java Placeholders
Java
apache-2.0
8ea582117ca25209d23dc768fde12611801ad18c
0
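A note on the placeholder change in this record: in Java format strings, "%1s" is not a positional reference; the leading digit is a minimum field width, and arguments are still consumed left to right. Explicit argument indexes need the '$' form ("%1$s"), which is what allows a translator to reorder placeholders, and is why the defaults above are written that way. The following standalone demonstration shows the difference; the class name is illustrative only.

/** Contrasts "%1s" (width 1) with "%1$s" (argument index 1). */
public class FormatDemo {
    public static void main(String[] args) {
        // without '$', arguments are consumed in order; "1" and "2" are widths
        System.out.println(String.format("%1s of %2s", 3, 10));    // "3 of 10"
        // with '$', each placeholder picks its argument by position,
        // so a translation may reorder them freely
        System.out.println(String.format("%2$s von %1$s", 3, 10)); // "10 von 3"
    }
}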
bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud
package com.planet_ink.coffee_mud.Items.Weapons; import java.util.*; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.CMSecurity.DbgFlag; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.Basic.StdRideable; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; /* Copyright 2016-2021 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class StdSiegeWeapon extends StdRideable implements AmmunitionWeapon, SiegableItem { @Override public String ID() { return "StdSiegeWeapon"; } protected int weaponDamageType = TYPE_PIERCING; protected int weaponClassification = CLASS_RANGED; protected boolean useExtendedMissString = false; protected int minRange = 0; protected int maxRange = 10; protected int ammoCapacity = 1; protected volatile int nextTacticalMoveDir = -1; protected volatile int lastSpamCt = 0; protected volatile String lastSpamMsg = ""; protected PairList<MOB, Long> otherUsers = new PairVector<MOB, Long>(); protected SiegableItem siegeTarget = null; protected Room siegeCombatRoom = null; protected PairList<Item, int[]> coordinates = null; protected volatile int[] aiming = null; public StdSiegeWeapon() { super(); setName("a siege weapon bow"); setDisplayText("a siege weapon is mounted here."); setDescription("It looks like it might fire special ammunition"); basePhyStats().setAbility(0); basePhyStats().setLevel(0); basePhyStats.setWeight(500); basePhyStats().setAttackAdjustment(0); basePhyStats().setDamage(20); //basePhyStats().setSensesMask(basePhyStats().sensesMask()|PhyStats.SENSE_ITEMNOTGET); setAmmunitionType("spears"); setAmmoCapacity(1); setAmmoRemaining(1); baseGoldValue=15000; recoverPhyStats(); minRange=1; maxRange=10; setRiderCapacity(0); weaponDamageType=Weapon.TYPE_PIERCING; material=RawMaterial.RESOURCE_WOOD; weaponClassification=Weapon.CLASS_RANGED; properWornBitmap=0; wornLogicalAnd = false; } @Override public int weaponDamageType() { return weaponDamageType; } @Override public int weaponClassification() { return weaponClassification; } @Override public void setWeaponDamageType(final int newType) { weaponDamageType = newType; } @Override public void setWeaponClassification(final int newClassification) { weaponClassification = newClassification; } @Override public boolean isFreeStanding() { return true; } @Override public String secretIdentity() { String id=super.secretIdentity(); 
if(phyStats().ability()>0) id=name()+" +"+phyStats().ability()+((id.length()>0)?"\n":"")+id; else if(phyStats().ability()<0) id=name()+" "+phyStats().ability()+((id.length()>0)?"\n":"")+id; return id+"\n\rAttack: "+phyStats().attackAdjustment()+", Damage: "+phyStats().damage(); } @Override public void recoverPhyStats() { super.recoverPhyStats(); if(phyStats().damage()!=0) { final int ability=super.wornLogicalAnd ? (phyStats().ability()*CMath.numberOfSetBits(super.myWornCode)) : phyStats().ability(); phyStats().setDamage(phyStats().damage()+(ability*2)); phyStats().setAttackAdjustment(phyStats().attackAdjustment()+(ability*10)); } if((subjectToWearAndTear())&&(usesRemaining()<100)) phyStats().setDamage(((int)Math.round(CMath.mul(phyStats().damage(),CMath.div(usesRemaining(),100))))); } @Override public void setRangeToTarget(final int newRange) { //nothing to do atm } protected int getDirectionToTarget(final PhysicalAgent dirTarget) { if((dirTarget != null)&&(dirTarget instanceof SiegableItem)) { final SiegableItem siegeTarget = this.siegeTarget; final int[] targetCoords = siegeTarget.getTacticalCoords(); final int[] myCoords = this.getTacticalCoords(); if((myCoords!=null)&&(targetCoords != null)) return Directions.getRelative11Directions(myCoords, targetCoords); } return -1; } @Override public final int getMaxHullPoints() { return 12; } @Override public int rangeToTarget() { return getTacticalDistance(siegeTarget); } @Override public boolean mayPhysicallyAttack(final PhysicalAgent victim) { if(!mayIFight(victim)) return false; return CMLib.map().roomLocation(this) == CMLib.map().roomLocation(victim); } @Override public boolean isInCombat() { final Physical siegeTarget=this.siegeTarget; if((siegeTarget != null)&& (siegeCombatRoom != null)) { if(siegeTarget.amDestroyed()) { clearTacticalModeInternal(); return false; } return true; } return false; } @Override public boolean isDefeated() { return amDestroyed(); } @Override public boolean mayIFight(final PhysicalAgent victim) { final PhysicalAgent defender=victim; MOB mob = null; for(final Enumeration<Rider> r=riders();r.hasMoreElements();) { final Rider R=r.nextElement(); if(R instanceof MOB) mob=(MOB)R; } if(mob==null) return true; return CMLib.combat().mayIAttackThisVessel(mob, defender); } protected int[] getMagicCoords() { final Room R=CMLib.map().roomLocation(this); final int[] coords; //final int middle = (int)Math.round(Math.floor(R.maxRange() / 2.0)); final int extreme = R.maxRange()-1; final int extremeRandom = (extreme > 0) ? CMLib.dice().roll(1, R.maxRange(), -1) : 0; final int extremeRandom2 = (extreme > 0) ? CMLib.dice().roll(1, R.maxRange(), -1) : 0; coords = new int[] {extremeRandom, extremeRandom2}; return coords; } protected int getTacticalDistance(final SiegableItem targetI) { if(targetI==null) return CMLib.map().roomLocation(this).maxRange() + 1; final int[] fromCoords = this.getTacticalCoords(); final PairList<Item,int[]> coords = this.getCombatField(); // might not yet be set. 
int lowest = Integer.MAX_VALUE; if((coords != null) && (fromCoords != null)) { final int p = coords.indexOfFirst(targetI); if(p >=0) { final Pair<Item,int[]> P=coords.get(p); final int distance = (int)Math.round(Math.ceil(Math.sqrt(Math.pow(P.second[0]-fromCoords[0],2.0) + Math.pow(P.second[1]-fromCoords[1],2.0)))); if(distance < lowest) lowest=distance; } } if(lowest == Integer.MAX_VALUE) return CMLib.map().roomLocation(this).maxRange() + 1; return lowest; } protected boolean isAnyoneAtCoords(final int[] xy) { final PairList<Item, int[]> coords = this.coordinates; if(coords != null) { for(final Iterator<int[]> i = coords.secondIterator(); i.hasNext();) { if(Arrays.equals(xy, i.next())) return true; } } return false; } protected synchronized boolean amInTacticalMode() { final Item siegeTarget = this.siegeTarget; final Room siegeCombatRoom = this.siegeCombatRoom; if((siegeTarget != null) && (!siegeTarget.amDestroyed()) && (siegeCombatRoom != null) && (siegeCombatRoom.isContent(siegeTarget)) && (siegeCombatRoom.isContent(this)) ) { if(coordinates == null) { synchronized((""+siegeCombatRoom + "_SIEGE_TACTICAL").intern()) { for(int i=0;i<siegeCombatRoom.numItems();i++) { final Item I=siegeCombatRoom.getItem(i); if((I instanceof SiegableItem) &&(((SiegableItem)I).getCombatField() != null)) { this.coordinates = ((SiegableItem)I).getCombatField(); } } if(coordinates == null) { this.coordinates = new SPairList<Item,int[]>(); } } final PairList<Item,int[]> coords = this.coordinates; if(coords != null) { if(!coords.containsFirst(this)) { int[] newCoords = null; for(int i=0;i<10;i++) { newCoords = this.getMagicCoords(); if(!isAnyoneAtCoords(newCoords)) break; } coords.add(new Pair<Item,int[]>(this,newCoords)); } } } return true; } else { this.siegeTarget = null; this.siegeCombatRoom = null; this.coordinates = null; return false; } } protected void clearTacticalMode() { synchronized((""+siegeCombatRoom + "_SIEGE_TACTICAL").intern()) { final PairList<Item,int[]> coords = this.coordinates; if(coords != null) { coords.removeFirst(this); } } this.siegeTarget = null; this.siegeCombatRoom = null; this.coordinates = null; this.aiming = null; } protected synchronized void clearTacticalModeInternal() { final Room siegeCombatRoom = this.siegeCombatRoom; if(siegeCombatRoom != null) { PairList<Item,int[]> coords = null; synchronized((""+siegeCombatRoom + "_SIEGE_TACTICAL").intern()) { coords = this.coordinates; } clearTacticalMode(); if(coords != null) { for(final Iterator<Item> s = coords.firstIterator();s.hasNext();) { final Item I=s.next(); if((I instanceof SiegableItem) &&(((SiegableItem)I).getCombatant() == this)) ((SiegableItem)I).setCombatant(null); } } } this.otherUsers.clear(); CMLib.threads().deleteTick(this, Tickable.TICKID_SPECIALCOMBAT); } @Override public void makePeace(final boolean includePlayerFollowers) { clearTacticalModeInternal(); } @Override public PhysicalAgent getCombatant() { return this.siegeTarget; } @Override public void setCombatant(final PhysicalAgent other) { final Room R=(owner() instanceof Room)?(Room)owner():CMLib.map().roomLocation(this); if(other == null) clearTacticalModeInternal(); else { if(other instanceof SiegableItem) siegeTarget = (SiegableItem)other; if(R != null) siegeCombatRoom = R; if(other instanceof Combatant) { if(((Combatant)other).getCombatant()==null) ((Combatant)other).setCombatant(this); } amInTacticalMode(); // now he is in combat if(!CMLib.threads().isTicking(this, Tickable.TICKID_SPECIALCOMBAT)) CMLib.threads().startTickDown(this, 
Tickable.TICKID_SPECIALCOMBAT, CombatLibrary.TICKS_PER_SHIP_COMBAT); } } @Override public int[] getTacticalCoords() { final PairList<Item, int[]> coords = this.coordinates; if(coords != null) { for(final Iterator<Pair<Item,int[]>> i = coords.iterator(); i.hasNext();) { final Pair<Item,int[]> P=i.next(); if(P.first == this) return P.second; } } return null; } @Override public int getDirectionToTarget() { return this.getDirectionToTarget(this.siegeTarget); } @Override public PairList<Weapon,int[]> getSiegeWeaponAimings() { final PairList<Weapon, int[]> aimings = new PairVector<Weapon, int[]>(); if(aiming==null) return aimings; aimings.add(this, aiming); return aimings; } @Override public void destroy() { super.destroy(); CMLib.threads().deleteTick(this, Tickable.TICKID_SPECIALCOMBAT); } @Override public String getTacticalView(final SiegableItem viewer) { final int[] targetCoords = getTacticalCoords(); final int[] myCoords; final String dist = ""+getTacticalDistance(viewer); if(viewer instanceof PhysicalAgent) { myCoords = viewer.getTacticalCoords(); if((myCoords!=null)&&(targetCoords != null)) { final String dirFromYou = CMLib.directions().getDirectionName(Directions.getRelative11Directions(myCoords, targetCoords)); return L("@x1 is @x2 of you at a distance of @x3.",name(),dirFromYou,dist); } else return L("@x1 is at a distance of @x2.",name(),dist); } else return L("@x1 is at a distance of @x2.",name(),dist); } @Override public PairList<Item, int[]> getCombatField() { return coordinates; } @Override public void executeMsg(final Environmental myHost, final CMMsg msg) { super.executeMsg(myHost,msg); if(msg.amITarget(this)) { switch(msg.targetMinor()) { case CMMsg.TYP_LOOK: case CMMsg.TYP_EXAMINE: if(CMLib.flags().canBeSeenBy(this,msg.source())) { if(requiresAmmunition()) msg.source().tell(L("@x1 remaining: @x2/@x3.",CMStrings.capitalizeAndLower(ammunitionType()),""+ammunitionRemaining(),""+ammunitionCapacity())); if((subjectToWearAndTear())&&(usesRemaining()<100)) msg.source().tell(weaponHealth()); } break; case CMMsg.TYP_RELOAD: if(msg.tool() instanceof Ammunition) { boolean recover=false; final Ammunition I=(Ammunition)msg.tool(); int howMuchToTake=ammunitionCapacity(); if(I.ammunitionRemaining()<howMuchToTake) howMuchToTake=I.ammunitionRemaining(); if(this.ammunitionCapacity() - this.ammunitionRemaining() < howMuchToTake) howMuchToTake=this.ammunitionCapacity() - this.ammunitionRemaining(); setAmmoRemaining(this.ammunitionRemaining() + howMuchToTake); I.setAmmoRemaining(I.ammunitionRemaining()-howMuchToTake); final LinkedList<Ability> removeThese=new LinkedList<Ability>(); for(final Enumeration<Ability> a=effects();a.hasMoreElements();) { final Ability A=a.nextElement(); if((A!=null)&&(!A.isSavable())&&(A.invoker()==null)) removeThese.add(A); } for(final Ability A : removeThese) delEffect(A); for(final Enumeration<Ability> a=I.effects();a.hasMoreElements();) { Ability A=a.nextElement(); if((A!=null)&&(A.isSavable())&&(fetchEffect(A.ID())==null)) { A=(Ability)A.copyOf(); A.setInvoker(null); A.setSavable(false); addEffect(A); recover=true; } } if(I.ammunitionRemaining()<=0) I.destroy(); if(recover) recoverOwner(); } break; case CMMsg.TYP_UNLOAD: if(msg.tool() instanceof Ammunition) { final Ammunition ammo=(Ammunition)msg.tool(); for(final Enumeration<Ability> a=effects();a.hasMoreElements();) { final Ability A=a.nextElement(); if((A!=null)&&(!A.isSavable())&&(A.invoker()==null)) { final Ability ammoA=(Ability)A.copyOf(); ammo.addNonUninvokableEffect(ammoA); } } setAmmoRemaining(0); final 
Room R=msg.source().location(); if(R!=null) { R.addItem(ammo, ItemPossessor.Expire.Player_Drop); CMLib.commands().postGet(msg.source(), null, ammo, true); } } break; case CMMsg.TYP_DAMAGE: if(msg.value() > 0) { int level = phyStats().level(); if(level < 10) level = 10; final double pctLoss = CMath.div(msg.value(), level) * 10.0; // siege weapons against rideables is harsh final int pointsLost = (int)Math.round(pctLoss * level); if(pointsLost > 0) { final int weaponType = (msg.tool() instanceof Weapon) ? ((Weapon)msg.tool()).weaponDamageType() : Weapon.TYPE_BASHING; final String hitWord = CMLib.combat().standardHitWord(weaponType, pctLoss); final String msgStr = (msg.targetMessage() == null) ? L("<O-NAME> fired from <S-NAME> hits and @x1 @x2.",hitWord,name()) : msg.targetMessage(); final CMMsg deckHitMsg=CMClass.getMsg(msg.source(), this, msg.tool(),CMMsg.MSG_OK_ACTION, msgStr); final Room targetRoom=CMLib.map().roomLocation(this); if(targetRoom.okMessage(msg.source(), deckHitMsg)) targetRoom.send(msg.source(), deckHitMsg); if(pointsLost >= this.usesRemaining()) { this.setUsesRemaining(0); this.recoverPhyStats(); // takes away the swimmability! final Room shipR=CMLib.map().roomLocation(this); if(shipR!=null) { final String sinkString = L("<T-NAME> <T-IS-ARE> destroyed!"); shipR.show(msg.source(), this, CMMsg.MSG_OK_ACTION, sinkString); this.destroy(); } } else { this.setUsesRemaining(this.usesRemaining() - pointsLost); } } } break; } } else if((msg.tool()==this) &&(msg.targetMinor()==CMMsg.TYP_WEAPONATTACK) &&(weaponClassification()==Weapon.CLASS_THROWN)) msg.addTrailerMsg(CMClass.getMsg(msg.source(),this,CMMsg.MSG_DROP,null)); if((msg.targetMinor()==CMMsg.TYP_DAMAGE) &&(msg.target()==this) &&(msg.value()>0) &&(subjectToWearAndTear()) &&((!CMLib.flags().isABonusItems(this))||(CMLib.dice().rollPercentage() > phyStats().level())) &&((material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_ENERGY) &&((material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_GAS)) { CMLib.combat().postItemDamage(msg.source(), this, null, 1, CMMsg.TYP_JUSTICE, null); } } protected MOB getFactoryAttacker(final Room thisRoom) { final MOB mob = CMClass.getFactoryMOB(name(),phyStats().level(),thisRoom); mob.setRiding(this); for(final MOB M : this.getPlayerAttackers()) { for(final Pair<Clan,Integer> C : M.clans()) { if(mob.getClanRole(C.first.clanID())==null) mob.setClan(C.first.clanID(), C.second.intValue()); } } return mob; } protected Boolean startAttack(final MOB sourceM, final Room thisRoom, final String rest) { final Item I=thisRoom.findItem(rest); if((I instanceof SiegableItem) &&(I!=this) &&(CMLib.flags().canBeSeenBy(I, sourceM))) { if(!sourceM.mayPhysicallyAttack(I)) { sourceM.tell(L("You are not permitted to attack @x1",I.name())); return Boolean.FALSE; } final MOB mob = getFactoryAttacker(thisRoom); try { final CMMsg maneuverMsg=CMClass.getMsg(mob,I,null,CMMsg.MSG_ADVANCE,null,CMMsg.MASK_MALICIOUS|CMMsg.MSG_ADVANCE,null,CMMsg.MSG_ADVANCE,L("<S-NAME> engage(s) @x1.",I.Name())); if(thisRoom.okMessage(mob, maneuverMsg)) { thisRoom.send(mob, maneuverMsg); siegeTarget = (SiegableItem)I; siegeCombatRoom = thisRoom; if(I instanceof SiegableItem) { final SiegableItem otherI=(SiegableItem)I; if(otherI.getCombatant() == null) otherI.setCombatant(this); } amInTacticalMode(); // now he is in combat if(!CMLib.threads().isTicking(this, Tickable.TICKID_SPECIALCOMBAT)) CMLib.threads().startTickDown(this, Tickable.TICKID_SPECIALCOMBAT, CombatLibrary.TICKS_PER_SHIP_COMBAT); //also support ENGAGE <name> as an 
alternative to attack? return Boolean.TRUE; } } finally { mob.destroy(); } } return null; } protected void addPlayerAttacker(final MOB M) { if((!(owner() instanceof Room)) ||(M==null) ||(!M.isPlayer())) return; final Room R=(Room)owner(); synchronized(this.otherUsers) { final long expire = System.currentTimeMillis() - 300000; for(final Iterator<Pair<MOB, Long>> p = this.otherUsers.iterator();p.hasNext();) { final Pair<MOB, Long> P = p.next(); if(P.first == M) { P.second=Long.valueOf(System.currentTimeMillis()); return; } else if(P.first.location()!=R) p.remove(); else if(expire > P.second.longValue()) p.remove(); } this.otherUsers.add(new Pair<MOB, Long>(M,Long.valueOf(System.currentTimeMillis()))); } } protected List<MOB> getPlayerAttackers() { final List<MOB> players=new LinkedList<MOB>(); if(!(owner() instanceof Room)) return players; final Room R=(Room)owner(); synchronized(this.otherUsers) { final long expire = System.currentTimeMillis() - 300000; for(final Iterator<Pair<MOB, Long>> p = this.otherUsers.iterator();p.hasNext();) { final Pair<MOB, Long> P = p.next(); if(P.first.location()!=R) p.remove(); else if(expire > P.second.longValue()) p.remove(); else players.add(P.first); } } for(final Enumeration<Rider> r=riders();r.hasMoreElements();) { final Rider rR=r.nextElement(); if((rR instanceof MOB) &&(((MOB)rR).location()==R) &&(!players.contains(rR))) players.add((MOB)rR); } return players; } public void announceToUsers(final String msgStr) { for(final MOB M : this.getPlayerAttackers()) M.tell(msgStr); } public boolean tick(final Tickable ticking, final int tickID) { if(tickID == Tickable.TICKID_SPECIALCOMBAT) { if(this.amInTacticalMode()) { final int direction = this.nextTacticalMoveDir; if(direction >= 0) { this.nextTacticalMoveDir = -1; final Room thisRoom=CMLib.map().roomLocation(this); if((thisRoom != null) && this.amInTacticalMode()) { int[] tacticalCoords = null; int x=0; try { while((x>=0)&&(this.coordinates!=null)&&(tacticalCoords==null)) { x=this.coordinates.indexOfFirst(this); final Pair<Item,int[]> pair = (x>=0) ? 
this.coordinates.get(x) : null; if(pair == null) break; else if(pair.first != this) x=this.coordinates.indexOfFirst(this); else tacticalCoords = pair.second; } } catch(final Exception e) { } if(tacticalCoords != null) { final MOB mob = this.getFactoryAttacker(thisRoom); try { final String directionName = CMLib.directions().getDirectionName(direction).toLowerCase(); final int[] newCoords = Directions.adjustXYByDirections(tacticalCoords[0], tacticalCoords[1], direction); final CMMsg maneuverMsg=CMClass.getMsg(mob, thisRoom, null, CMMsg.MSG_ADVANCE,newCoords[0]+","+newCoords[1], CMMsg.MSG_ADVANCE,directionName, CMMsg.MSG_ADVANCE,L("<S-NAME> maneuver(s) @x1.",directionName)); if(thisRoom.okMessage(mob, maneuverMsg)) { thisRoom.send(mob, maneuverMsg); tacticalCoords[0] = newCoords[0]; tacticalCoords[1] = newCoords[1]; if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+" maneuvers to "+CMParms.toListString(tacticalCoords)); } } finally { mob.destroy(); } } } } final MOB mob = getFactoryAttacker(null); final int[] coordsToHit; final SiegableItem siegeTarget; synchronized(this) { siegeTarget=this.siegeTarget; } coordsToHit = siegeTarget.getTacticalCoords(); try { int notLoaded = 0; int notAimed = 0; final int[] aiming=this.aiming; final Weapon w=this; final Room R=CMLib.map().roomLocation(w); if(R!=null) { mob.setLocation(R); if((w instanceof AmmunitionWeapon) &&(((AmmunitionWeapon)w).requiresAmmunition()) &&(((AmmunitionWeapon)w).ammunitionRemaining() <=0)) notLoaded++; else if(aiming!=null) { final boolean wasHit = Arrays.equals(aiming, coordsToHit); CMLib.combat().postSiegeAttack(mob, this, siegeTarget, w, wasHit); if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) { final String targetedName=siegeTarget!=null?siegeTarget.Name():"Unknown"; Log.debugOut("SiegeCombat: "+Name()+" aimed "+w.Name()+" at "+CMParms.toListString(aiming) +" and "+(wasHit?"hit ":"missed ")+targetedName+" at "+CMParms.toListString(coordsToHit)); } this.aiming=null; // reset for next attack } else notAimed++; } final String spamMsg; if((notLoaded > 0) && (notAimed > 0)) spamMsg = L("@x1 was not loaded and not aimed.",name()); else if(notLoaded > 0) spamMsg = L("@x1 was not loaded.",name()); else if(notAimed > 0) spamMsg = L("@x1 was not aimed.",name()); else spamMsg = ""; if(spamMsg.length()>0) { if(spamMsg.equals(lastSpamMsg)) { if(lastSpamCt < 3) { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) { final String targetedName=siegeTarget!=null?siegeTarget.Name():"Unknown"; Log.debugOut("SiegeCombat: "+Name()+" targeted: "+targetedName+", status: "+spamMsg); } announceToUsers(spamMsg); lastSpamCt++; } } else { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) { final String targetedName=siegeTarget!=null?siegeTarget.Name():"Unknown"; Log.debugOut("SiegeCombat: "+Name()+" targeted: "+targetedName+", status: "+spamMsg); } announceToUsers(spamMsg); lastSpamCt=0; } } lastSpamMsg=spamMsg; } finally { mob.setRangeToTarget(0); mob.destroy(); } } else return false; } return super.tick(ticking, tickID); } @Override public boolean okMessage(final Environmental myHost, final CMMsg msg) { if(!super.okMessage(myHost,msg)) return false; if((msg.targetMinor()==CMMsg.TYP_WEAPONATTACK) &&(msg.tool()==this) &&(requiresAmmunition()) &&(ammunitionCapacity()>0)) { if(ammunitionRemaining()>ammunitionCapacity()) setAmmoRemaining(ammunitionCapacity()); if(ammunitionRemaining()<=0) return false; else setUsesRemaining(usesRemaining()-1); } else if((msg.sourceMinor()==CMMsg.TYP_HUH) &&(msg.targetMessage()!=null) &&(owner() 
instanceof Room) &&(!(((Room)owner()).getArea() instanceof BoardableItem))) { final List<String> cmds=CMParms.parse(msg.targetMessage()); if(cmds.size()<1) return true; final String word=cmds.get(0).toUpperCase(); // MUST IMPLEMENT AIM, since your target might be moving. if("TARGET".startsWith(word)) { final boolean isRiding=msg.source().riding()==this; if((cmds.size()==1) ||((!isRiding)&&(cmds.size()<3))) { if(isRiding) msg.source().tell(L("You must specify a target.")); else msg.source().tell(L("You must specify which weapon to target, and at what.")); return false; } final Room thisRoom = (Room)owner(); if(thisRoom==null) { msg.source().tell(L("@x1 is nowhere to be found!",name())); return false; } if(!isRiding) { final String what=cmds.get(1); if(msg.source().location().findItem(null, what)!=this) return true; cmds.remove(1); } for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); final String rest = CMParms.combine(cmds,1); final Boolean result = startAttack(msg.source(),thisRoom,rest); if(result == Boolean.TRUE) { if(this.siegeTarget != null) { msg.source().tell(L("You are now targeting @x1.",this.siegeTarget.Name())); msg.source().tell(this.siegeTarget.getTacticalView(this)); } return false; } else if(result == Boolean.FALSE) return false; else { msg.source().tell(L("You don't see '@x1' here to target.",rest)); return false; } } else if("AIM".startsWith(word)) { if(!this.amInTacticalMode()) { msg.source().tell(L("You must be in tactical mode to aim.")); return false; } final boolean isRiding=msg.source().riding()==this; if((cmds.size()==1) ||((!isRiding)&&(cmds.size()<3))) { if(isRiding) msg.source().tell(L("You must specify an amount to lead the target.")); else msg.source().tell(L("You must specify which weapon to aim, and how far ahead of the target to aim it.")); return false; } final Room thisRoom = (Room)owner(); if(thisRoom==null) { msg.source().tell(L("@x1 is nowhere to be found!",name())); return false; } if(!isRiding) { final String what=cmds.get(1); if(msg.source().location().findItem(null, what)!=this) return true; cmds.remove(1); } for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); final String rest = CMParms.combine(cmds,1); if((!CMath.isInteger(rest))||(CMath.s_int(rest)<0)) { if(this.siegeTarget!=null) msg.source().tell(L("'@x1' is not a valid distance ahead of @x2 to fire.",rest,this.siegeTarget.name())); else msg.source().tell(L("'@x1' is not a valid distance.",rest)); return false; } int distance = maxRange(); int[] targetCoords = new int[2]; int leadAmt=0; if(this.siegeTarget instanceof SiegableItem) { targetCoords = this.siegeTarget.getTacticalCoords(); if(targetCoords == null) { msg.source().tell(L("You must be targeting an enemy to aim weapons.")); return false; } distance = rangeToTarget(); leadAmt = CMath.s_int(rest); final int direction; if(this.siegeTarget instanceof NavigableItem) direction = ((NavigableItem)this.siegeTarget).getDirectionFacing(); else direction = CMLib.dice().roll(1, Directions.NUM_DIRECTIONS(), -1); for(int i=0;i<leadAmt;i++) targetCoords = Directions.adjustXYByDirections(targetCoords[0], targetCoords[1], direction); } if((maxRange() < distance)||(minRange() > distance)) { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+" target is presently at a distance of "+distance+", but "+Name()+" range is "+minRange()+" to "+maxRange()); msg.source().tell(L("Your target is presently at a distance of @x1, but this weapon's range is @x2 to @x3.",
""+distance,""+minRange(),""+maxRange())); return false; } if(requiresAmmunition() && (ammunitionCapacity() > 0) && (ammunitionRemaining() == 0)) { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+": "+Name()+" wasn't loaded, couldn't be aimed."); msg.source().tell(L("@x1 needs to be LOADed first.",Name())); return false; } final String timeToFire=""+(CMLib.threads().msToNextTick(this, Tickable.TICKID_SPECIALCOMBAT) / 1000); final String msgStr=L("<S-NAME> aim(s) <O-NAME> at <T-NAME> (@x1).",""+leadAmt); if(msg.source().isMonster() && aiming != null) { msg.source().tell(L("@x1 is already aimed.",Name())); return false; } final CMMsg msg2=CMClass.getMsg(msg.source(), siegeTarget, this, CMMsg.MSG_NOISYMOVEMENT, msgStr); if(thisRoom.okMessage(msg.source(), msg2)) { this.aiming = targetCoords; thisRoom.send(msg.source(), msg2); if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+": aimed "+Name()+" at : "+CMParms.toListString(targetCoords)); if(!this.requiresAmmunition()) msg.source().tell(L("@x1 is now aimed and will be engage in @x2 seconds.",name(),timeToFire)); else msg.source().tell(L("@x1 is now aimed and will be fired in @x2 seconds.",name(),timeToFire)); } } } else if((msg.target()==this) &&((msg.targetMinor()==CMMsg.TYP_PUSH)||(msg.targetMinor()==CMMsg.TYP_PULL)) &&(msg.tool() instanceof Room) &&(this.amInTacticalMode()) &&(msg.value()>=0) &&(msg.value()<Directions.NUM_DIRECTIONS())) { for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); msg.setTool(null); // this is even better than cancelling it. msg.source().tell(L("<S-NAME> order(s) @x1 moved @x2.",name(msg.source()),CMLib.directions().getDirectionName(msg.value()).toLowerCase())); this.nextTacticalMoveDir=msg.value(); return false; } else if((msg.targetMinor()==CMMsg.TYP_LEAVE) &&(msg.target() instanceof Room) &&(msg.source().location()==owner()) &&(this.riding()!=null) &&(msg.source().riding()!=this) &&((msg.source().riding()!=null) ||(msg.source().numFollowers()>0) ||((msg.source() instanceof Rideable)&&((Rideable)msg.source()).numRiders()>0))) { final Set<Physical> grp=CMLib.tracking().getAllGroupRiders(msg.source(), msg.source().location()); if(grp.contains(this) &&(this.amInTacticalMode())) { for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); msg.source().tell(L("<S-NAME> order(s) @x1 moved @x2.",name(msg.source()),CMLib.directions().getDirectionName(msg.value()).toLowerCase())); this.nextTacticalMoveDir=msg.value(); return false; } } return true; } @Override public void setUsesRemaining(int newUses) { if(newUses==Integer.MAX_VALUE) newUses=100; super.setUsesRemaining(newUses); } protected String weaponHealth() { if(usesRemaining()>=100) return ""; else if(usesRemaining()>=95) return name()+" looks slightly used ("+usesRemaining()+"%)"; else if(usesRemaining()>=85) return name()+" is somewhat worn ("+usesRemaining()+"%)"; else if(usesRemaining()>=75) return name()+" is worn ("+usesRemaining()+"%)"; else if(usesRemaining()>50) return name()+" is damaged ("+usesRemaining()+"%)"; else if(usesRemaining()>25) return name()+" is heavily damaged ("+usesRemaining()+"%)"; else return name()+" is so damaged, it is practically harmless ("+usesRemaining()+"%)"; } @Override public String missString() { return CMLib.combat().standardMissString(weaponDamageType,weaponClassification,name(),useExtendedMissString); } @Override public String hitString(final int damageAmount) { return 
CMLib.combat().standardHitString(weaponDamageType, weaponClassification,damageAmount,name()); } @Override public int minRange() { if(CMath.bset(phyStats().sensesMask(),PhyStats.SENSE_ITEMNOMINRANGE)) return 0; return minRange; } @Override public int maxRange() { if(CMath.bset(phyStats().sensesMask(),PhyStats.SENSE_ITEMNOMAXRANGE)) return 100; return maxRange; } @Override public void setRanges(final int min, final int max) { minRange = min; maxRange = max; } @Override public int[] getRanges() { return new int[] { minRange, maxRange }; } @Override public boolean requiresAmmunition() { if((ammunitionType()==null)||(this instanceof Wand)) return false; return ammunitionType().length()>0 && (ammunitionCapacity()>0); } @Override public void setAmmunitionType(final String ammo) { if(!(this instanceof Wand)) setReadableText(ammo); } @Override public String ammunitionType() { return readableText(); } @Override public int ammunitionRemaining() { return usesRemaining(); } @Override public void setAmmoRemaining(int amount) { final int oldAmount=ammunitionRemaining(); if(amount==Integer.MAX_VALUE) amount=20; setUsesRemaining(amount); if((oldAmount>0) &&(amount==0) &&(ammunitionCapacity()>0)) { boolean recover=false; for(final Enumeration<Ability> a=effects();a.hasMoreElements();) { final Ability A=a.nextElement(); if((A!=null)&&(!A.isSavable())&&(A.invoker()==null)) { recover=true; delEffect(A); } } if(recover) recoverOwner(); } } @Override public int ammunitionCapacity() { return ammoCapacity; } @Override public void setAmmoCapacity(final int amount) { ammoCapacity = amount; } @Override public int value() { if((subjectToWearAndTear())&&(usesRemaining()<1000)) return (int)Math.round(CMath.mul(super.value(),CMath.div(usesRemaining(),100))); return super.value(); } @Override public boolean subjectToWearAndTear() { return((!requiresAmmunition()) &&(!(this instanceof Wand)) &&(usesRemaining()<=1000) &&(usesRemaining()>=0)); } public void recoverOwner() { final ItemPossessor myOwner=owner; if(myOwner instanceof MOB) { ((MOB)myOwner).recoverCharStats(); ((MOB)myOwner).recoverMaxState(); ((MOB)myOwner).recoverPhyStats(); } else if(myOwner!=null) myOwner.recoverPhyStats(); } }
com/planet_ink/coffee_mud/Items/Weapons/StdSiegeWeapon.java
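The getTacticalDistance() method in the file above boils down to a Euclidean distance between two int[] {x, y} tactical-grid coordinate pairs, rounded up. A minimal, self-contained sketch of that calculation follows; the class and method names are illustrative, not part of the CoffeeMud API:

// Illustrative sketch of the distance math used in getTacticalDistance():
// straight-line distance between two tactical grid coordinates, rounded up.
public final class TacticalMath {
	private TacticalMath() {}

	public static int distance(final int[] from, final int[] to) {
		final double dx = to[0] - from[0];
		final double dy = to[1] - from[1];
		return (int) Math.ceil(Math.sqrt((dx * dx) + (dy * dy)));
	}

	public static void main(final String[] args) {
		// e.g. a target 3 squares east and 4 squares north is 5 squares away
		System.out.println(distance(new int[] {0, 0}, new int[] {3, 4})); // prints 5
	}
}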
package com.planet_ink.coffee_mud.Items.Weapons; import java.util.*; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.CMSecurity.DbgFlag; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.Basic.StdRideable; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; /* Copyright 2016-2021 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class StdSiegeWeapon extends StdRideable implements AmmunitionWeapon, SiegableItem { @Override public String ID() { return "StdSiegeWeapon"; } protected int weaponDamageType = TYPE_PIERCING; protected int weaponClassification = CLASS_RANGED; protected boolean useExtendedMissString = false; protected int minRange = 0; protected int maxRange = 10; protected int ammoCapacity = 1; protected volatile int nextTacticalMoveDir = -1; protected volatile int lastSpamCt = 0; protected volatile String lastSpamMsg = ""; protected PairList<MOB, Long> otherUsers = new PairVector<MOB, Long>(); protected SiegableItem siegeTarget = null; protected Room siegeCombatRoom = null; protected PairList<Item, int[]> coordinates = null; protected volatile int[] aiming = null; public StdSiegeWeapon() { super(); setName("a siege weapon bow"); setDisplayText("a siege weapon is mounted here."); setDescription("It looks like it might fire special ammunition"); basePhyStats().setAbility(0); basePhyStats().setLevel(0); basePhyStats.setWeight(500); basePhyStats().setAttackAdjustment(0); basePhyStats().setDamage(20); //basePhyStats().setSensesMask(basePhyStats().sensesMask()|PhyStats.SENSE_ITEMNOTGET); setAmmunitionType("spears"); setAmmoCapacity(1); setAmmoRemaining(1); baseGoldValue=15000; recoverPhyStats(); minRange=1; maxRange=10; setRiderCapacity(0); weaponDamageType=Weapon.TYPE_PIERCING; material=RawMaterial.RESOURCE_WOOD; weaponClassification=Weapon.CLASS_RANGED; properWornBitmap=0; wornLogicalAnd = false; } @Override public int weaponDamageType() { return weaponDamageType; } @Override public int weaponClassification() { return weaponClassification; } @Override public void setWeaponDamageType(final int newType) { weaponDamageType = newType; } @Override public void setWeaponClassification(final int newClassification) { weaponClassification = newClassification; } @Override public boolean isFreeStanding() { return true; } @Override public String secretIdentity() { String id=super.secretIdentity(); 
if(phyStats().ability()>0) id=name()+" +"+phyStats().ability()+((id.length()>0)?"\n":"")+id; else if(phyStats().ability()<0) id=name()+" "+phyStats().ability()+((id.length()>0)?"\n":"")+id; return id+"\n\rAttack: "+phyStats().attackAdjustment()+", Damage: "+phyStats().damage(); } @Override public void recoverPhyStats() { super.recoverPhyStats(); if(phyStats().damage()!=0) { final int ability=super.wornLogicalAnd ? (phyStats().ability()*CMath.numberOfSetBits(super.myWornCode)) : phyStats().ability(); phyStats().setDamage(phyStats().damage()+(ability*2)); phyStats().setAttackAdjustment(phyStats().attackAdjustment()+(ability*10)); } if((subjectToWearAndTear())&&(usesRemaining()<100)) phyStats().setDamage(((int)Math.round(CMath.mul(phyStats().damage(),CMath.div(usesRemaining(),100))))); } @Override public void setRangeToTarget(final int newRange) { //nothing to do atm } protected int getDirectionToTarget(final PhysicalAgent dirTarget) { if((dirTarget != null)&&(dirTarget instanceof SiegableItem)) { final SiegableItem siegeTarget = this.siegeTarget; final int[] targetCoords = siegeTarget.getTacticalCoords(); final int[] myCoords = this.getTacticalCoords(); if((myCoords!=null)&&(targetCoords != null)) return Directions.getRelative11Directions(myCoords, targetCoords); } return -1; } @Override public final int getMaxHullPoints() { return 12; } @Override public int rangeToTarget() { return getTacticalDistance(siegeTarget); } @Override public boolean mayPhysicallyAttack(final PhysicalAgent victim) { if(!mayIFight(victim)) return false; return CMLib.map().roomLocation(this) == CMLib.map().roomLocation(victim); } @Override public boolean isInCombat() { final Physical siegeTarget=this.siegeTarget; if((siegeTarget != null)&& (siegeCombatRoom != null)) { if(siegeTarget.amDestroyed()) { clearTacticalModeInternal(); return false; } return true; } return false; } @Override public boolean isDefeated() { return amDestroyed(); } @Override public boolean mayIFight(final PhysicalAgent victim) { final PhysicalAgent defender=victim; MOB mob = null; for(final Enumeration<Rider> r=riders();r.hasMoreElements();) { final Rider R=r.nextElement(); if(R instanceof MOB) mob=(MOB)R; } if(mob==null) return true; return CMLib.combat().mayIAttackThisVessel(mob, defender); } protected int[] getMagicCoords() { final Room R=CMLib.map().roomLocation(this); final int[] coords; //final int middle = (int)Math.round(Math.floor(R.maxRange() / 2.0)); final int extreme = R.maxRange()-1; final int extremeRandom = (extreme > 0) ? CMLib.dice().roll(1, R.maxRange(), -1) : 0; final int extremeRandom2 = (extreme > 0) ? CMLib.dice().roll(1, R.maxRange(), -1) : 0; coords = new int[] {extremeRandom, extremeRandom2}; return coords; } protected int getTacticalDistance(final SiegableItem targetI) { if(targetI==null) return CMLib.map().roomLocation(this).maxRange() + 1; final int[] fromCoords = this.getTacticalCoords(); final PairList<Item,int[]> coords = this.getCombatField(); // might not yet be set. 
int lowest = Integer.MAX_VALUE; if((coords != null) && (fromCoords != null)) { final int p = coords.indexOfFirst(targetI); if(p >=0) { final Pair<Item,int[]> P=coords.get(p); final int distance = (int)Math.round(Math.ceil(Math.sqrt(Math.pow(P.second[0]-fromCoords[0],2.0) + Math.pow(P.second[1]-fromCoords[1],2.0)))); if(distance < lowest) lowest=distance; } } if(lowest == Integer.MAX_VALUE) return CMLib.map().roomLocation(this).maxRange() + 1; return lowest; } protected boolean isAnyoneAtCoords(final int[] xy) { final PairList<Item, int[]> coords = this.coordinates; if(coords != null) { for(final Iterator<int[]> i = coords.secondIterator(); i.hasNext();) { if(Arrays.equals(xy, i.next())) return true; } } return false; } protected synchronized boolean amInTacticalMode() { final Item siegeTarget = this.siegeTarget; final Room siegeCombatRoom = this.siegeCombatRoom; if((siegeTarget != null) && (!siegeTarget.amDestroyed()) && (siegeCombatRoom != null) && (siegeCombatRoom.isContent(siegeTarget)) && (siegeCombatRoom.isContent(this)) ) { if(coordinates == null) { synchronized((""+siegeCombatRoom + "_SIEGE_TACTICAL").intern()) { for(int i=0;i<siegeCombatRoom.numItems();i++) { final Item I=siegeCombatRoom.getItem(i); if((I instanceof SiegableItem) &&(((SiegableItem)I).getCombatField() != null)) { this.coordinates = ((SiegableItem)I).getCombatField(); } } if(coordinates == null) { this.coordinates = new SPairList<Item,int[]>(); } } final PairList<Item,int[]> coords = this.coordinates; if(coords != null) { if(!coords.containsFirst(this)) { int[] newCoords = null; for(int i=0;i<10;i++) { newCoords = this.getMagicCoords(); if(!isAnyoneAtCoords(newCoords)) break; } coords.add(new Pair<Item,int[]>(this,newCoords)); } } } return true; } else { this.siegeTarget = null; this.siegeCombatRoom = null; this.coordinates = null; return false; } } protected void clearTacticalMode() { synchronized((""+siegeCombatRoom + "_SIEGE_TACTICAL").intern()) { final PairList<Item,int[]> coords = this.coordinates; if(coords != null) { coords.removeFirst(this); } } this.siegeTarget = null; this.siegeCombatRoom = null; this.coordinates = null; this.aiming = null; } protected synchronized void clearTacticalModeInternal() { final Room siegeCombatRoom = this.siegeCombatRoom; if(siegeCombatRoom != null) { PairList<Item,int[]> coords = null; synchronized((""+siegeCombatRoom + "_SIEGE_TACTICAL").intern()) { coords = this.coordinates; } clearTacticalMode(); if(coords != null) { for(final Iterator<Item> s = coords.firstIterator();s.hasNext();) { final Item I=s.next(); if((I instanceof SiegableItem) &&(((SiegableItem)I).getCombatant() == this)) ((SiegableItem)I).setCombatant(null); } } } this.otherUsers.clear(); CMLib.threads().deleteTick(this, Tickable.TICKID_SPECIALCOMBAT); } @Override public void makePeace(final boolean includePlayerFollowers) { clearTacticalModeInternal(); } @Override public PhysicalAgent getCombatant() { return this.siegeTarget; } @Override public void setCombatant(final PhysicalAgent other) { final Room R=(owner() instanceof Room)?(Room)owner():CMLib.map().roomLocation(this); if(other == null) clearTacticalModeInternal(); else { if(other instanceof SiegableItem) siegeTarget = (SiegableItem)other; if(R != null) siegeCombatRoom = R; if(other instanceof Combatant) { if(((Combatant)other).getCombatant()==null) ((Combatant)other).setCombatant(this); } amInTacticalMode(); // now he is in combat } } @Override public int[] getTacticalCoords() { final PairList<Item, int[]> coords = this.coordinates; if(coords != null) { 
for(final Iterator<Pair<Item,int[]>> i = coords.iterator(); i.hasNext();) { final Pair<Item,int[]> P=i.next(); if(P.first == this) return P.second; } } return null; } @Override public int getDirectionToTarget() { return this.getDirectionToTarget(this.siegeTarget); } @Override public PairList<Weapon,int[]> getSiegeWeaponAimings() { final PairList<Weapon, int[]> aimings = new PairVector<Weapon, int[]>(); if(aiming==null) return aimings; aimings.add(this, aiming); return aimings; } @Override public void destroy() { super.destroy(); CMLib.threads().deleteTick(this, Tickable.TICKID_SPECIALCOMBAT); } @Override public String getTacticalView(final SiegableItem viewer) { final int[] targetCoords = getTacticalCoords(); final int[] myCoords; final String dist = ""+getTacticalDistance(viewer); if(viewer instanceof PhysicalAgent) { myCoords = viewer.getTacticalCoords(); if((myCoords!=null)&&(targetCoords != null)) { final String dirFromYou = CMLib.directions().getDirectionName(Directions.getRelative11Directions(myCoords, targetCoords)); return L("@x1 is @x2 of you at a distance of @x3.",name(),dirFromYou,dist); } else return L("@x1 is at a distance of @x2.",name(),dist); } else return L("@x1 is at a distance of @x2.",name(),dist); } @Override public PairList<Item, int[]> getCombatField() { return coordinates; } @Override public void executeMsg(final Environmental myHost, final CMMsg msg) { super.executeMsg(myHost,msg); if(msg.amITarget(this)) { switch(msg.targetMinor()) { case CMMsg.TYP_LOOK: case CMMsg.TYP_EXAMINE: if(CMLib.flags().canBeSeenBy(this,msg.source())) { if(requiresAmmunition()) msg.source().tell(L("@x1 remaining: @x2/@x3.",CMStrings.capitalizeAndLower(ammunitionType()),""+ammunitionRemaining(),""+ammunitionCapacity())); if((subjectToWearAndTear())&&(usesRemaining()<100)) msg.source().tell(weaponHealth()); } break; case CMMsg.TYP_RELOAD: if(msg.tool() instanceof Ammunition) { boolean recover=false; final Ammunition I=(Ammunition)msg.tool(); int howMuchToTake=ammunitionCapacity(); if(I.ammunitionRemaining()<howMuchToTake) howMuchToTake=I.ammunitionRemaining(); if(this.ammunitionCapacity() - this.ammunitionRemaining() < howMuchToTake) howMuchToTake=this.ammunitionCapacity() - this.ammunitionRemaining(); setAmmoRemaining(this.ammunitionRemaining() + howMuchToTake); I.setAmmoRemaining(I.ammunitionRemaining()-howMuchToTake); final LinkedList<Ability> removeThese=new LinkedList<Ability>(); for(final Enumeration<Ability> a=effects();a.hasMoreElements();) { final Ability A=a.nextElement(); if((A!=null)&&(!A.isSavable())&&(A.invoker()==null)) removeThese.add(A); } for(final Ability A : removeThese) delEffect(A); for(final Enumeration<Ability> a=I.effects();a.hasMoreElements();) { Ability A=a.nextElement(); if((A!=null)&&(A.isSavable())&&(fetchEffect(A.ID())==null)) { A=(Ability)A.copyOf(); A.setInvoker(null); A.setSavable(false); addEffect(A); recover=true; } } if(I.ammunitionRemaining()<=0) I.destroy(); if(recover) recoverOwner(); } break; case CMMsg.TYP_UNLOAD: if(msg.tool() instanceof Ammunition) { final Ammunition ammo=(Ammunition)msg.tool(); for(final Enumeration<Ability> a=effects();a.hasMoreElements();) { final Ability A=a.nextElement(); if((A!=null)&&(!A.isSavable())&&(A.invoker()==null)) { final Ability ammoA=(Ability)A.copyOf(); ammo.addNonUninvokableEffect(ammoA); } } setAmmoRemaining(0); final Room R=msg.source().location(); if(R!=null) { R.addItem(ammo, ItemPossessor.Expire.Player_Drop); CMLib.commands().postGet(msg.source(), null, ammo, true); } } break; case CMMsg.TYP_DAMAGE: 
if(msg.value() > 0) { int level = phyStats().level(); if(level < 10) level = 10; final double pctLoss = CMath.div(msg.value(), level) * 10.0; // siege weapons against rideables is harsh final int pointsLost = (int)Math.round(pctLoss * level); if(pointsLost > 0) { final int weaponType = (msg.tool() instanceof Weapon) ? ((Weapon)msg.tool()).weaponDamageType() : Weapon.TYPE_BASHING; final String hitWord = CMLib.combat().standardHitWord(weaponType, pctLoss); final String msgStr = (msg.targetMessage() == null) ? L("<O-NAME> fired from <S-NAME> hits and @x1 @x2.",hitWord,name()) : msg.targetMessage(); final CMMsg deckHitMsg=CMClass.getMsg(msg.source(), this, msg.tool(),CMMsg.MSG_OK_ACTION, msgStr); final Room targetRoom=CMLib.map().roomLocation(this); if(targetRoom.okMessage(msg.source(), deckHitMsg)) targetRoom.send(msg.source(), deckHitMsg); if(pointsLost >= this.usesRemaining()) { this.setUsesRemaining(0); this.recoverPhyStats(); // takes away the swimmability! final Room shipR=CMLib.map().roomLocation(this); if(shipR!=null) { final String sinkString = L("<T-NAME> <T-IS-ARE> destroyed!"); shipR.show(msg.source(), this, CMMsg.MSG_OK_ACTION, sinkString); this.destroy(); } } else { this.setUsesRemaining(this.usesRemaining() - pointsLost); } } } break; } } else if((msg.tool()==this) &&(msg.targetMinor()==CMMsg.TYP_WEAPONATTACK) &&(weaponClassification()==Weapon.CLASS_THROWN)) msg.addTrailerMsg(CMClass.getMsg(msg.source(),this,CMMsg.MSG_DROP,null)); if((msg.targetMinor()==CMMsg.TYP_DAMAGE) &&(msg.target()==this) &&(msg.value()>0) &&(subjectToWearAndTear()) &&((!CMLib.flags().isABonusItems(this))||(CMLib.dice().rollPercentage() > phyStats().level())) &&((material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_ENERGY) &&((material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_GAS)) { CMLib.combat().postItemDamage(msg.source(), this, null, 1, CMMsg.TYP_JUSTICE, null); } } protected MOB getFactoryAttacker(final Room thisRoom) { final MOB mob = CMClass.getFactoryMOB(name(),phyStats().level(),thisRoom); mob.setRiding(this); for(final MOB M : this.getPlayerAttackers()) { for(final Pair<Clan,Integer> C : M.clans()) { if(mob.getClanRole(C.first.clanID())==null) mob.setClan(C.first.clanID(), C.second.intValue()); } } return mob; } protected Boolean startAttack(final MOB sourceM, final Room thisRoom, final String rest) { final Item I=thisRoom.findItem(rest); if((I instanceof SiegableItem) &&(I!=this) &&(CMLib.flags().canBeSeenBy(I, sourceM))) { if(!sourceM.mayPhysicallyAttack(I)) { sourceM.tell(L("You are not permitted to attack @x1",I.name())); return Boolean.FALSE; } final MOB mob = getFactoryAttacker(thisRoom); try { final CMMsg maneuverMsg=CMClass.getMsg(mob,I,null,CMMsg.MSG_ADVANCE,null,CMMsg.MASK_MALICIOUS|CMMsg.MSG_ADVANCE,null,CMMsg.MSG_ADVANCE,L("<S-NAME> engage(s) @x1.",I.Name())); if(thisRoom.okMessage(mob, maneuverMsg)) { thisRoom.send(mob, maneuverMsg); siegeTarget = (SiegableItem)I; siegeCombatRoom = thisRoom; if(I instanceof SiegableItem) { final SiegableItem otherI=(SiegableItem)I; if(otherI.getCombatant() == null) otherI.setCombatant(this); } amInTacticalMode(); // now he is in combat CMLib.threads().startTickDown(this, Tickable.TICKID_SPECIALCOMBAT, CombatLibrary.TICKS_PER_SHIP_COMBAT); //also support ENGAGE <name> as an alternative to attack? 
return Boolean.TRUE; } } finally { mob.destroy(); } } return null; } protected void addPlayerAttacker(final MOB M) { if((!(owner() instanceof Room)) ||(M==null) ||(!M.isPlayer())) return; final Room R=(Room)owner(); synchronized(this.otherUsers) { final long expire = System.currentTimeMillis() - 300000; for(final Iterator<Pair<MOB, Long>> p = this.otherUsers.iterator();p.hasNext();) { final Pair<MOB, Long> P = p.next(); if(P.first == M) { P.second=Long.valueOf(System.currentTimeMillis()); return; } else if(P.first.location()!=R) p.remove(); else if(expire > P.second.longValue()) p.remove(); } this.otherUsers.add(new Pair<MOB, Long>(M,Long.valueOf(System.currentTimeMillis()))); } } protected List<MOB> getPlayerAttackers() { final List<MOB> players=new LinkedList<MOB>(); if(!(owner() instanceof Room)) return players; final Room R=(Room)owner(); synchronized(this.otherUsers) { final long expire = System.currentTimeMillis() - 300000; for(final Iterator<Pair<MOB, Long>> p = this.otherUsers.iterator();p.hasNext();) { final Pair<MOB, Long> P = p.next(); if(P.first.location()!=R) p.remove(); else if(expire > P.second.longValue()) p.remove(); else players.add(P.first); } } for(final Enumeration<Rider> r=riders();r.hasMoreElements();) { final Rider rR=r.nextElement(); if((rR instanceof MOB) &&(((MOB)rR).location()==R) &&(!players.contains(rR))) players.add((MOB)rR); } return players; } public void announceToUsers(final String msgStr) { for(final MOB M : this.getPlayerAttackers()) M.tell(msgStr); } public boolean tick(final Tickable ticking, final int tickID) { if(tickID == Tickable.TICKID_SPECIALCOMBAT) { if(this.amInTacticalMode()) { final int direction = this.nextTacticalMoveDir; if(direction >= 0) { this.nextTacticalMoveDir = -1; final Room thisRoom=CMLib.map().roomLocation(this); if((thisRoom != null) && this.amInTacticalMode()) { int[] tacticalCoords = null; int x=0; try { while((x>=0)&&(this.coordinates!=null)&&(tacticalCoords==null)) { x=this.coordinates.indexOfFirst(this); final Pair<Item,int[]> pair = (x>=0) ? 
this.coordinates.get(x) : null; if(pair == null) break; else if(pair.first != this) x=this.coordinates.indexOfFirst(this); else tacticalCoords = pair.second; } } catch(final Exception e) { } if(tacticalCoords != null) { final MOB mob = this.getFactoryAttacker(thisRoom); try { final String directionName = CMLib.directions().getDirectionName(direction).toLowerCase(); final int[] newCoords = Directions.adjustXYByDirections(tacticalCoords[0], tacticalCoords[1], direction); final CMMsg maneuverMsg=CMClass.getMsg(mob, thisRoom, null, CMMsg.MSG_ADVANCE,newCoords[0]+","+newCoords[1], CMMsg.MSG_ADVANCE,directionName, CMMsg.MSG_ADVANCE,L("<S-NAME> maneuver(s) @x1.",directionName)); if(thisRoom.okMessage(mob, maneuverMsg)) { thisRoom.send(mob, maneuverMsg); tacticalCoords[0] = newCoords[0]; tacticalCoords[1] = newCoords[1]; if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+" maneuvers to "+CMParms.toListString(tacticalCoords)); } } finally { mob.destroy(); } } } } final MOB mob = getFactoryAttacker(null); final int[] coordsToHit; final SiegableItem siegeTarget; synchronized(this) { siegeTarget=this.siegeTarget; } coordsToHit = siegeTarget.getTacticalCoords(); try { int notLoaded = 0; int notAimed = 0; final int[] aiming=this.aiming; final Weapon w=this; final Room R=CMLib.map().roomLocation(w); if(R!=null) { mob.setLocation(R); if((w instanceof AmmunitionWeapon) &&(((AmmunitionWeapon)w).requiresAmmunition()) &&(((AmmunitionWeapon)w).ammunitionRemaining() <=0)) notLoaded++; else if(aiming!=null) { final boolean wasHit = Arrays.equals(aiming, coordsToHit); CMLib.combat().postSiegeAttack(mob, this, siegeTarget, w, wasHit); if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) { final String targetedName=siegeTarget!=null?siegeTarget.Name():"Unknown"; Log.debugOut("SiegeCombat: "+Name()+" aimed "+w.Name()+" at "+CMParms.toListString(aiming) +" and "+(wasHit?"hit ":"missed ")+targetedName+" at "+CMParms.toListString(coordsToHit)); } this.aiming=null; // reset for next attack } else notAimed++; } final String spamMsg; if((notLoaded > 0) && (notAimed > 0)) spamMsg = L("@x1 was not loaded and not aimed.",name()); else if(notLoaded > 0) spamMsg = L("@x1 was not loaded.",name()); else if(notAimed > 0) spamMsg = L("@x1 was not aimed.",name()); else spamMsg = ""; if(spamMsg.length()>0) { if(spamMsg.equals(lastSpamMsg)) { if(lastSpamCt < 3) { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) { final String targetedName=siegeTarget!=null?siegeTarget.Name():"Unknown"; Log.debugOut("SiegeCombat: "+Name()+" targeted: "+targetedName+", status: "+spamMsg); } announceToUsers(spamMsg); lastSpamCt++; } } else { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) { final String targetedName=siegeTarget!=null?siegeTarget.Name():"Unknown"; Log.debugOut("SiegeCombat: "+Name()+" targeted: "+targetedName+", status: "+spamMsg); } announceToUsers(spamMsg); lastSpamCt=0; } } lastSpamMsg=spamMsg; } finally { mob.setRangeToTarget(0); mob.destroy(); } } else return false; } return super.tick(ticking, tickID); } @Override public boolean okMessage(final Environmental myHost, final CMMsg msg) { if(!super.okMessage(myHost,msg)) return false; if((msg.targetMinor()==CMMsg.TYP_WEAPONATTACK) &&(msg.tool()==this) &&(requiresAmmunition()) &&(ammunitionCapacity()>0)) { if(ammunitionRemaining()>ammunitionCapacity()) setAmmoRemaining(ammunitionCapacity()); if(ammunitionRemaining()<=0) return false; else setUsesRemaining(usesRemaining()-1); } else if((msg.sourceMinor()==CMMsg.TYP_HUH) &&(msg.targetMessage()!=null) &&(owner() 
instanceof Room) &&(!(((Room)owner()).getArea() instanceof BoardableItem))) { final List<String> cmds=CMParms.parse(msg.targetMessage()); if(cmds.size()<1) return true; final String word=cmds.get(0).toUpperCase(); // MUST IMPLEMENT AIM, since your target might be moving. if("TARGET".startsWith(word)) { final boolean isRiding=msg.source().riding()==this; if((cmds.size()==1) ||((!isRiding)&&(cmds.size()<3))) { if(isRiding) msg.source().tell(L("You must specify a target.")); else msg.source().tell(L("You must specify which weapon to target, and at what.")); return false; } final Room thisRoom = (Room)owner(); if(thisRoom==null) { msg.source().tell(L("@x1 is nowhere to be found!",name())); return false; } if(!isRiding) { final String what=cmds.get(1); if(msg.source().location().findItem(null, what)!=this) return true; cmds.remove(1); } for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); final String rest = CMParms.combine(cmds,1); final Boolean result = startAttack(msg.source(),thisRoom,rest); if(result == Boolean.TRUE) { if(this.siegeTarget != null) { msg.source().tell(L("You are now targeting @x1.",this.siegeTarget.Name())); msg.source().tell(this.siegeTarget.getTacticalView(this)); } return false; } else if(result == Boolean.FALSE) return false; else { msg.source().tell(L("You don't see '@x1' here to target.",rest)); return false; } } else if("AIM".startsWith(word)) { if(!this.amInTacticalMode()) { msg.source().tell(L("You must be in tactical mode to aim.")); return false; } final boolean isRiding=msg.source().riding()==this; if((cmds.size()==1) ||((!isRiding)&&(cmds.size()<3))) { if(isRiding) msg.source().tell(L("You must specify an amount to lead the target.")); else msg.source().tell(L("You must specify which weapon to aim, and how far ahead of the target to aim it.")); return false; } final Room thisRoom = (Room)owner(); if(thisRoom==null) { msg.source().tell(L("@x1 is nowhere to be found!",name())); return false; } if(!isRiding) { final String what=cmds.get(1); if(msg.source().location().findItem(null, what)!=this) return true; cmds.remove(1); } for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); final String rest = CMParms.combine(cmds,1); if((!CMath.isInteger(rest))||(CMath.s_int(rest)<0)) { if(this.siegeTarget!=null) msg.source().tell(L("'@x1' is not a valid distance ahead of @x2 to fire.",rest,this.siegeTarget.name())); else msg.source().tell(L("'@x1' is not a valid distance.",rest)); return false; } int distance = maxRange(); int[] targetCoords = new int[2]; int leadAmt=0; if(this.siegeTarget instanceof SiegableItem) { targetCoords = this.siegeTarget.getTacticalCoords(); if(targetCoords == null) { msg.source().tell(L("You must be targeting an enemy to aim weapons.")); return false; } distance = rangeToTarget(); leadAmt = CMath.s_int(rest); final int direction; if(this.siegeTarget instanceof NavigableItem) direction = ((NavigableItem)this.siegeTarget).getDirectionFacing(); else direction = CMLib.dice().roll(1, Directions.NUM_DIRECTIONS(), -1); for(int i=0;i<leadAmt;i++) targetCoords = Directions.adjustXYByDirections(targetCoords[0], targetCoords[1], direction); } if((maxRange() < distance)||(minRange() > distance)) { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+" target is presently at a distance of "+distance+", but "+Name()+" range is "+minRange()+" to "+maxRange()); msg.source().tell(L("Your target is presently at a distance of @x1, but this weapon's range is @x2 to @x3.",
""+distance,""+minRange(),""+maxRange())); return false; } if(requiresAmmunition() && (ammunitionCapacity() > 0) && (ammunitionRemaining() == 0)) { if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+": "+Name()+" wasn't loaded, couldn't be aimed."); msg.source().tell(L("@x1 needs to be LOADed first.",Name())); return false; } final String timeToFire=""+(CMLib.threads().msToNextTick(this, Tickable.TICKID_SPECIALCOMBAT) / 1000); final String msgStr=L("<S-NAME> aim(s) <O-NAME> at <T-NAME> (@x1).",""+leadAmt); if(msg.source().isMonster() && aiming != null) { msg.source().tell(L("@x1 is already aimed.",Name())); return false; } final CMMsg msg2=CMClass.getMsg(msg.source(), siegeTarget, this, CMMsg.MSG_NOISYMOVEMENT, msgStr); if(thisRoom.okMessage(msg.source(), msg2)) { this.aiming = targetCoords; thisRoom.send(msg.source(), msg2); if(CMSecurity.isDebugging(DbgFlag.SIEGECOMBAT)) Log.debugOut("SiegeCombat: "+Name()+": aimed "+Name()+" at : "+CMParms.toListString(targetCoords)); if(!this.requiresAmmunition()) msg.source().tell(L("@x1 is now aimed and will be engage in @x2 seconds.",name(),timeToFire)); else msg.source().tell(L("@x1 is now aimed and will be fired in @x2 seconds.",name(),timeToFire)); } } } else if((msg.target()==this) &&((msg.targetMinor()==CMMsg.TYP_PUSH)||(msg.targetMinor()==CMMsg.TYP_PULL)) &&(msg.tool() instanceof Room) &&(this.amInTacticalMode()) &&(msg.value()>=0) &&(msg.value()<Directions.NUM_DIRECTIONS())) { for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); msg.setTool(null); // this is even better than cancelling it. msg.source().tell(L("<S-NAME> order(s) @x1 moved @x2.",name(msg.source()),CMLib.directions().getDirectionName(msg.value()).toLowerCase())); this.nextTacticalMoveDir=msg.value(); return false; } else if((msg.targetMinor()==CMMsg.TYP_LEAVE) &&(msg.target() instanceof Room) &&(msg.source().location()==owner()) &&(this.riding()!=null) &&(msg.source().riding()!=this) &&((msg.source().riding()!=null) ||(msg.source().numFollowers()>0) ||((msg.source() instanceof Rideable)&&((Rideable)msg.source()).numRiders()>0))) { final Set<Physical> grp=CMLib.tracking().getAllGroupRiders(msg.source(), msg.source().location()); if(grp.contains(this) &&(this.amInTacticalMode())) { for(final MOB M: msg.source().getGroupMembers(new HashSet<MOB>())) this.addPlayerAttacker(M); msg.source().tell(L("<S-NAME> order(s) @x1 moved @x2.",name(msg.source()),CMLib.directions().getDirectionName(msg.value()).toLowerCase())); this.nextTacticalMoveDir=msg.value(); return false; } } return true; } @Override public void setUsesRemaining(int newUses) { if(newUses==Integer.MAX_VALUE) newUses=100; super.setUsesRemaining(newUses); } protected String weaponHealth() { if(usesRemaining()>=100) return ""; else if(usesRemaining()>=95) return name()+" looks slightly used ("+usesRemaining()+"%)"; else if(usesRemaining()>=85) return name()+" is somewhat worn ("+usesRemaining()+"%)"; else if(usesRemaining()>=75) return name()+" is worn ("+usesRemaining()+"%)"; else if(usesRemaining()>50) return name()+" is damaged ("+usesRemaining()+"%)"; else if(usesRemaining()>25) return name()+" is heavily damaged ("+usesRemaining()+"%)"; else return name()+" is so damaged, it is practically harmless ("+usesRemaining()+"%)"; } @Override public String missString() { return CMLib.combat().standardMissString(weaponDamageType,weaponClassification,name(),useExtendedMissString); } @Override public String hitString(final int damageAmount) { return 
CMLib.combat().standardHitString(weaponDamageType, weaponClassification,damageAmount,name()); } @Override public int minRange() { if(CMath.bset(phyStats().sensesMask(),PhyStats.SENSE_ITEMNOMINRANGE)) return 0; return minRange; } @Override public int maxRange() { if(CMath.bset(phyStats().sensesMask(),PhyStats.SENSE_ITEMNOMAXRANGE)) return 100; return maxRange; } @Override public void setRanges(final int min, final int max) { minRange = min; maxRange = max; } @Override public int[] getRanges() { return new int[] { minRange, maxRange }; } @Override public boolean requiresAmmunition() { if((ammunitionType()==null)||(this instanceof Wand)) return false; return ammunitionType().length()>0 && (ammunitionCapacity()>0); } @Override public void setAmmunitionType(final String ammo) { if(!(this instanceof Wand)) setReadableText(ammo); } @Override public String ammunitionType() { return readableText(); } @Override public int ammunitionRemaining() { return usesRemaining(); } @Override public void setAmmoRemaining(int amount) { final int oldAmount=ammunitionRemaining(); if(amount==Integer.MAX_VALUE) amount=20; setUsesRemaining(amount); if((oldAmount>0) &&(amount==0) &&(ammunitionCapacity()>0)) { boolean recover=false; for(final Enumeration<Ability> a=effects();a.hasMoreElements();) { final Ability A=a.nextElement(); if((A!=null)&&(!A.isSavable())&&(A.invoker()==null)) { recover=true; delEffect(A); } } if(recover) recoverOwner(); } } @Override public int ammunitionCapacity() { return ammoCapacity; } @Override public void setAmmoCapacity(final int amount) { ammoCapacity = amount; } @Override public int value() { if((subjectToWearAndTear())&&(usesRemaining()<1000)) return (int)Math.round(CMath.mul(super.value(),CMath.div(usesRemaining(),100))); return super.value(); } @Override public boolean subjectToWearAndTear() { return((!requiresAmmunition()) &&(!(this instanceof Wand)) &&(usesRemaining()<=1000) &&(usesRemaining()>=0)); } public void recoverOwner() { final ItemPossessor myOwner=owner; if(myOwner instanceof MOB) { ((MOB)myOwner).recoverCharStats(); ((MOB)myOwner).recoverMaxState(); ((MOB)myOwner).recoverPhyStats(); } else if(myOwner!=null) myOwner.recoverPhyStats(); } }
safety checkin git-svn-id: 0cdf8356e41b2d8ccbb41bb76c82068fe80b2514@20892 0d6f1817-ed0e-0410-87c9-987e46238f29
com/planet_ink/coffee_mud/Items/Weapons/StdSiegeWeapon.java
safety checkin
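For context on the AIM handler in the record above: a lead amount is applied by stepping the target's current coordinates one square at a time along its facing direction, via Directions.adjustXYByDirections. A simplified, self-contained sketch of that loop, assuming a plain four-direction grid in place of CoffeeMud's Directions library (all names below are illustrative):

// Simplified sketch of leading a moving target, as in the AIM handler above.
public final class AimLead {
	enum Dir {
		NORTH(0, -1), SOUTH(0, 1), EAST(1, 0), WEST(-1, 0);
		final int dx, dy;
		Dir(final int dx, final int dy) { this.dx = dx; this.dy = dy; }
	}

	// Step the target's coordinates leadAmt squares along its facing direction.
	static int[] lead(int[] target, final Dir facing, final int leadAmt) {
		for (int i = 0; i < leadAmt; i++)
			target = new int[] { target[0] + facing.dx, target[1] + facing.dy };
		return target;
	}

	public static void main(final String[] args) {
		final int[] aimPoint = lead(new int[] {5, 5}, Dir.EAST, 3);
		System.out.println(aimPoint[0] + "," + aimPoint[1]); // prints 8,5
	}
}

The shot later counts as a hit only if the stored aim point still equals the target's tactical coordinates when the weapon fires, which is why leading a moving target matters.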
Java
apache-2.0
f38876c1753e09d9fc73126776267cd0ccba01a2
0
ninowalker/jmemcache-daemon
/** * Copyright 2008 ThimbleWare Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thimbleware.jmemcached; import org.apache.mina.common.ByteBuffer; import static java.lang.Integer.parseInt; import static java.lang.String.valueOf; import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; /** */ public class Cache { private int currentItems; private int totalItems; private int getCmds; private int setCmds; private int getHits; private int getMisses; private long casCounter; protected CacheStorage cacheStorage; private DelayQueue<DelayedMCElement> deleteQueue; private final ReadWriteLock deleteQueueReadWriteLock; public enum StoreResponse { STORED, NOT_STORED, EXISTS, NOT_FOUND } public enum DeleteResponse { DELETED, NOT_FOUND } /** * Read-write lock allows maximal concurrency, since readers can share access; * only writers need sole access. */ private final ReadWriteLock cacheReadWriteLock; /** * Delayed key blocks get processed occasionally. */ private class DelayedMCElement implements Delayed { private MCElement element; public DelayedMCElement(MCElement element) { this.element = element; } public long getDelay(TimeUnit timeUnit) { return timeUnit.convert(element.blocked_until - Now(), TimeUnit.MILLISECONDS); } public int compareTo(Delayed delayed) { if (!(delayed instanceof DelayedMCElement)) return -1; else return element.keystring.compareTo(((DelayedMCElement)delayed).element.keystring); } } /** * Construct the server session handler * * @param cacheStorage the cache to use */ public Cache(CacheStorage cacheStorage) { initStats(); this.cacheStorage = cacheStorage; this.deleteQueue = new DelayQueue<DelayedMCElement>(); cacheReadWriteLock = new ReentrantReadWriteLock(); deleteQueueReadWriteLock = new ReentrantReadWriteLock(); } /** * Handle the deletion of an item from the cache. * * @param key the key for the item * @param time an amount of time to block this entry in the cache for further writes * @return the message response */ public DeleteResponse delete(String key, int time) { try { startCacheWrite(); if (isThere(key)) { if (time != 0) { // mark it as blocked MCElement el = this.cacheStorage.get(key); el.blocked = true; el.blocked_until = Now() + time; // actually clear the data since we don't need to keep it el.data_length = 0; el.data = new byte[0]; this.cacheStorage.put(key, el, el.data_length); // this must go on a queue for processing later... try { deleteQueueReadWriteLock.writeLock().lock(); deleteQueue.add(new DelayedMCElement(el)); } finally { deleteQueueReadWriteLock.writeLock().unlock(); } } else { this.cacheStorage.remove(key); // just remove it } return DeleteResponse.DELETED; } else { return DeleteResponse.NOT_FOUND; } } finally { finishCacheWrite(); } } /** * Executed periodically to clean from the cache those entries that are just blocking * the insertion of new ones. 
*/ public void processDeleteQueue() { try { deleteQueueReadWriteLock.writeLock().lock(); DelayedMCElement toDelete = deleteQueue.poll(); if (toDelete != null) { try { startCacheWrite(); if (this.cacheStorage.get(toDelete.element.keystring) != null) { this.cacheStorage.remove(toDelete.element.keystring); } } finally { finishCacheWrite(); } } } finally { deleteQueueReadWriteLock.writeLock().unlock(); } } /** * Add an element to the cache * * @param e the element to add * @return the store response code */ public StoreResponse add(MCElement e) { try { startCacheWrite(); if (!isThere(e.keystring)) return set(e); else return StoreResponse.NOT_STORED; } finally { finishCacheWrite(); } } /** * Replace an element in the cache * * @param e the element to replace * @return the store response code */ public StoreResponse replace(MCElement e) { try { startCacheWrite(); if (isThere(e.keystring)) return set(e); else return StoreResponse.NOT_STORED; } finally { finishCacheWrite(); } } /** * Append bytes to the end of an element in the cache * * @param element the element to append * @return the store response code */ public StoreResponse append(MCElement element) { try { startCacheWrite(); MCElement ret = get(element.keystring); if (ret == null || isBlocked(ret) || isExpired(ret)) return StoreResponse.NOT_FOUND; else { ret.data_length += element.data_length; ByteBuffer b = ByteBuffer.allocate(ret.data_length); b.put(ret.data); b.put(element.data); ret.data = new byte[ret.data_length]; b.flip(); b.get(ret.data); ret.cas_unique++; this.cacheStorage.put(ret.keystring, ret, ret.data_length); return StoreResponse.STORED; } } finally { finishCacheWrite(); } } /** * Prepend bytes to the beginning of an element in the cache * * @param element the element to prepend * @return the store response code */ public StoreResponse prepend(MCElement element) { try { startCacheWrite(); MCElement ret = get(element.keystring); if (ret == null || isBlocked(ret) || isExpired(ret)) return StoreResponse.NOT_FOUND; else { ret.data_length += element.data_length; ByteBuffer b = ByteBuffer.allocate(ret.data_length); b.put(element.data); b.put(ret.data); ret.data = new byte[ret.data_length]; b.flip(); b.get(ret.data); ret.cas_unique++; this.cacheStorage.put(ret.keystring, ret, ret.data_length); return StoreResponse.STORED; } } finally { finishCacheWrite(); } } /** * Set an element in the cache * * @param e the element to set * @return the store response code */ public StoreResponse set(MCElement e) { try { startCacheWrite(); setCmds += 1;//update stats // increment the CAS counter; put in the new CAS e.cas_unique = casCounter++; this.cacheStorage.put(e.keystring, e, e.data_length); return StoreResponse.STORED; } finally { finishCacheWrite(); } } /** * Set an element in the cache but only if the element has not been touched * since the last 'gets' * @param cas_key the cas key returned by the last gets * @param e the element to set * @return the store response code */ public StoreResponse cas(Long cas_key, MCElement e) { try { startCacheWrite(); // have to get the element MCElement element = get(e.keystring); if (element == null || isBlocked(element)) return StoreResponse.NOT_FOUND; if (element.cas_unique == cas_key) { // cas_unique matches, now set the element return set(e); } else { // cas didn't match; someone else beat us to it return StoreResponse.EXISTS; } } finally { finishCacheWrite(); } } /** * Increment an (integer) element in the cache * @param key the key to increment * @param mod the amount to add to the value * @return the
message response */ public Integer get_add(String key, int mod) { try { startCacheWrite(); MCElement e = this.cacheStorage.get(key); if (e == null) { getMisses += 1;//update stats return null; } if (isExpired(e) || e.blocked) { //logger.info("FOUND BUT EXPIRED"); getMisses += 1;//update stats return null; } // TODO handle parse failure! int old_val = parseInt(new String(e.data)) + mod; // change value if (old_val < 0) { old_val = 0; } // check for underflow e.data = valueOf(old_val).getBytes(); // toString e.data_length = e.data.length; // assign new cas id e.cas_unique = casCounter++; this.cacheStorage.put(e.keystring, e, e.data_length); // save new value return old_val; } finally { finishCacheWrite(); } } /** * Check whether an element is in the cache and non-expired and the slot is non-blocked * @param key the key for the element to lookup * @return whether the element is in the cache and is live */ protected boolean isThere(String key) { try { startCacheRead(); MCElement e = this.cacheStorage.get(key); return e != null && !isExpired(e) && !isBlocked(e); } finally { finishCacheRead(); } } protected boolean isBlocked(MCElement e) { return e.blocked && e.blocked_until > Now(); } protected boolean isExpired(MCElement e) { return e.expire != 0 && e.expire < Now(); } /** * Get an element from the cache * @param key the key for the element to lookup * @return the element, or 'null' in case of cache miss. */ public MCElement get(String key) { getCmds += 1;//updates stats try { startCacheRead(); MCElement e = this.cacheStorage.get(key); if (e == null) { getMisses += 1;//update stats return null; } if (isExpired(e) || e.blocked) { getMisses += 1;//update stats return null; } getHits += 1;//update stats return e; } finally { finishCacheRead(); } } /** * Flush all cache entries * @return command response */ public boolean flush_all() { return flush_all(0); } /** * Flush all cache entries with a timestamp after a given expiration time * @param expire the flush time in seconds * @return command response */ public boolean flush_all(int expire) { // TODO implement this, it isn't right... but how to handle efficiently? (don't want to linear scan entire cacheStorage) try { startCacheWrite(); this.cacheStorage.clear(); } finally { finishCacheWrite(); } return true; } /** * @return the current time in seconds (from epoch), used for expiries, etc. */ protected final int Now() { return (int) (System.currentTimeMillis() / 1000); } /** * Initialize all statistic counters */ protected void initStats() { currentItems = 0; totalItems = 0; getCmds = setCmds = getHits = getMisses = 0; } public Set<String> keys() { try { startCacheRead(); return cacheStorage.keys(); } finally { finishCacheRead(); } } public long getCurrentItems() { try { startCacheRead(); return this.cacheStorage.count(); } finally { finishCacheRead(); } } public long getLimitMaxBytes() { try { startCacheRead(); return this.cacheStorage.getMaximumSize(); } finally { finishCacheRead(); } } public long getCurrentBytes() { try { startCacheRead(); return this.cacheStorage.getSize(); } finally { finishCacheRead(); } } /** * Blocks of code in which the contents of the cache * are examined in any way must be surrounded by calls to <code>startRead</code> * and <code>finishRead</code>. See documentation for ReadWriteLock. 
*/ private void startCacheRead() { cacheReadWriteLock.readLock().lock(); } /** * Blocks of code in which the contents of the cache * are examined in any way must be surrounded by calls to <code>startRead</code> * and <code>finishRead</code>. See documentation for ReadWriteLock. */ private void finishCacheRead() { cacheReadWriteLock.readLock().unlock(); } /** * Blocks of code in which the contents of the cache * are changed in any way must be surrounded by calls to <code>startWrite</code> and * <code>finishWrite</code>. See documentation for ReadWriteLock. * protect the higher layers from implementation details. */ private void startCacheWrite() { cacheReadWriteLock.writeLock().lock(); } /** * Blocks of code in which the contents of the cache * are changed in any way must be surrounded by calls to <code>startWrite</code> and * <code>finishWrite</code>. See documentation for ReadWriteLock. */ private void finishCacheWrite() { cacheReadWriteLock.writeLock().unlock(); } public int getTotalItems() { return totalItems; } public int getGetCmds() { return getCmds; } public int getSetCmds() { return setCmds; } public int getGetHits() { return getHits; } public int getGetMisses() { return getMisses; } }
jmemcached-core/src/main/java/com/thimbleware/jmemcached/Cache.java
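Every read in the Cache class above is bracketed by startCacheRead()/finishCacheRead(), and every mutation by the write-lock equivalents, always pairing lock() with unlock() in a finally block. A minimal sketch of that discipline, using a hypothetical GuardedMap in place of CacheStorage (names are illustrative, not part of jmemcached):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Minimal sketch of the read/write-lock discipline used by Cache:
// readers share the lock, writers take it exclusively, and every
// lock() is paired with an unlock() in a finally block.
public final class GuardedMap {
	private final Map<String, byte[]> map = new HashMap<>();
	private final ReadWriteLock lock = new ReentrantReadWriteLock();

	public byte[] get(final String key) {
		lock.readLock().lock();
		try {
			return map.get(key);
		} finally {
			lock.readLock().unlock();
		}
	}

	public void put(final String key, final byte[] value) {
		lock.writeLock().lock();
		try {
			map.put(key, value);
		} finally {
			lock.writeLock().unlock();
		}
	}

	public static void main(final String[] args) {
		final GuardedMap m = new GuardedMap();
		m.put("greeting", "hello".getBytes());
		System.out.println(new String(m.get("greeting"))); // hello
	}
}

Readers can hold the lock concurrently while a writer blocks until they drain, which matches the "maximal concurrency" comment in the source.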
/** * Copyright 2008 ThimbleWare Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thimbleware.jmemcached; import org.apache.mina.common.ByteBuffer; import static java.lang.Integer.parseInt; import static java.lang.String.valueOf; import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; /** */ public class Cache { private int currentItems; private int totalItems; private int getCmds; private int setCmds; private int getHits; private int getMisses; private long casCounter; protected CacheStorage cacheStorage; private DelayQueue<DelayedMCElement> deleteQueue; private final ReadWriteLock deleteQueueReadWriteLock; public enum StoreResponse { STORED, NOT_STORED, EXISTS, NOT_FOUND } public enum DeleteResponse { DELETED, NOT_FOUND } /** * Read-write lock allows maximal concurrency, since readers can share access; * only writers need sole access. */ private final ReadWriteLock cacheReadWriteLock; /** * Delayed key blocks get processed occasionally. */ private class DelayedMCElement implements Delayed { private MCElement element; public DelayedMCElement(MCElement element) { this.element = element; } public long getDelay(TimeUnit timeUnit) { return timeUnit.convert(element.blocked_until - Now(), TimeUnit.MILLISECONDS); } public int compareTo(Delayed delayed) { if (!(delayed instanceof DelayedMCElement)) return -1; else return element.keystring.compareTo(((DelayedMCElement)delayed).element.keystring); } } /** * Construct the server session handler * * @param cacheStorage the cache to use */ public Cache(CacheStorage cacheStorage) { initStats(); this.cacheStorage = cacheStorage; this.deleteQueue = new DelayQueue<DelayedMCElement>(); cacheReadWriteLock = new ReentrantReadWriteLock(); deleteQueueReadWriteLock = new ReentrantReadWriteLock(); } /** * Handle the deletion of an item from the cache. * * @param key the key for the item * @param time an amount of time to block this entry in the cache for further writes * @return the message response */ public DeleteResponse delete(String key, int time) { try { startCacheWrite(); if (isThere(key)) { if (time != 0) { // mark it as blocked MCElement el = this.cacheStorage.get(key); el.blocked = true; el.blocked_until = Now() + time; // actually clear the data since we don't need to keep it el.data_length = 0; el.data = new byte[0]; this.cacheStorage.put(key, el, el.data_length); // this must go on a queue for processing later... try { deleteQueueReadWriteLock.writeLock().lock(); deleteQueue.add(new DelayedMCElement(el)); } finally { deleteQueueReadWriteLock.writeLock().unlock(); } } else { this.cacheStorage.remove(key); // just remove it } return DeleteResponse.DELETED; } else { return DeleteResponse.NOT_FOUND; } } finally { finishCacheWrite(); } } /** * Executed periodically to clean from the cache those entries that are just blocking * the insertion of new ones. 
*/ public void processDeleteQueue() { try { deleteQueueReadWriteLock.writeLock().lock(); DelayedMCElement toDelete = deleteQueue.poll(); if (toDelete != null) { try { startCacheWrite(); if (this.cacheStorage.get(toDelete.element.keystring) != null) { this.cacheStorage.remove(toDelete.element.keystring); } } finally { finishCacheWrite(); } } } finally { deleteQueueReadWriteLock.writeLock().unlock(); } } /** * Add an element to the cache * * @param e the element to add * @return the store response code */ public StoreResponse add(MCElement e) { try { startCacheWrite(); if (isThere(e.keystring)) return set(e); else return StoreResponse.NOT_STORED; } finally { finishCacheWrite(); } } /** * Replace an element in the cache * * @param e the element to replace * @return the store response code */ public StoreResponse replace(MCElement e) { try { startCacheWrite(); if (isThere(e.keystring)) return set(e); else return StoreResponse.NOT_STORED; } finally { finishCacheWrite(); } } /** * Append bytes to the end of an element in the cache * * @param element the element to append * @return the store response code */ public StoreResponse append(MCElement element) { try { startCacheWrite(); MCElement ret = get(element.keystring); if (ret == null || isBlocked(ret) || isExpired(ret)) return StoreResponse.NOT_FOUND; else { ret.data_length += element.data_length; ByteBuffer b = ByteBuffer.allocate(ret.data_length); b.put(ret.data); b.put(element.data); ret.data = new byte[ret.data_length]; b.flip(); b.get(ret.data); ret.cas_unique++; this.cacheStorage.put(ret.keystring, ret, ret.data_length); return StoreResponse.STORED; } } finally { finishCacheWrite(); } } /** * Prepend bytes to the beginning of an element in the cache * * @param element the element to prepend * @return the store response code */ public StoreResponse prepend(MCElement element) { try { startCacheWrite(); MCElement ret = get(element.keystring); if (ret == null || isBlocked(ret) || isExpired(ret)) return StoreResponse.NOT_FOUND; else { ret.data_length += element.data_length; ByteBuffer b = ByteBuffer.allocate(ret.data_length); b.put(element.data); b.put(ret.data); ret.data = new byte[ret.data_length]; b.flip(); b.get(ret.data); ret.cas_unique++; this.cacheStorage.put(ret.keystring, ret, ret.data_length); return StoreResponse.STORED; } } finally { finishCacheWrite(); } } /** * Set an element in the cache * * @param e the element to set * @return the store response code */ public StoreResponse set(MCElement e) { try { startCacheWrite(); setCmds += 1;//update stats // increment the CAS counter; put in the new CAS e.cas_unique = casCounter++; this.cacheStorage.put(e.keystring, e, e.data_length); return StoreResponse.STORED; } finally { finishCacheWrite(); } } /** * Set an element in the cache but only if the element has not been touched * since the last 'gets' * @param cas_key the cas key returned by the last gets * @param e the element to set * @return the store response code */ public StoreResponse cas(Long cas_key, MCElement e) { try { startCacheWrite(); // have to get the element MCElement element = get(e.keystring); if (element == null || isBlocked(element)) return StoreResponse.NOT_FOUND; if (element.cas_unique == cas_key) { // cas_unique matches, now set the element return set(e); } else { // cas didn't match; someone else beat us to it return StoreResponse.EXISTS; } } finally { finishCacheWrite(); } } /** * Increment an (integer) element in the cache * @param key the key to increment * @param mod the amount to add to the value * @return the message
response */ public Integer get_add(String key, int mod) { try { startCacheWrite(); MCElement e = this.cacheStorage.get(key); if (e == null) { getMisses += 1;//update stats return null; } if (isExpired(e) || e.blocked) { //logger.info("FOUND BUT EXPIRED"); getMisses += 1;//update stats return null; } // TODO handle parse failure! int old_val = parseInt(new String(e.data)) + mod; // change value if (old_val < 0) { old_val = 0; } // check for underflow e.data = valueOf(old_val).getBytes(); // toString e.data_length = e.data.length; // assign new cas id e.cas_unique = casCounter++; this.cacheStorage.put(e.keystring, e, e.data_length); // save new value return old_val; } finally { finishCacheWrite(); } } /** * Check whether an element is in the cache and non-expired and the slot is non-blocked * @param key the key for the element to lookup * @return whether the element is in the cache and is live */ protected boolean isThere(String key) { try { startCacheRead(); MCElement e = this.cacheStorage.get(key); return e != null && !isExpired(e) && !isBlocked(e); } finally { finishCacheRead(); } } protected boolean isBlocked(MCElement e) { return e.blocked && e.blocked_until > Now(); } protected boolean isExpired(MCElement e) { return e.expire != 0 && e.expire < Now(); } /** * Get an element from the cache * @param key the key for the element to lookup * @return the element, or 'null' in case of cache miss. */ public MCElement get(String key) { getCmds += 1;//updates stats try { startCacheRead(); MCElement e = this.cacheStorage.get(key); if (e == null) { getMisses += 1;//update stats return null; } if (isExpired(e) || e.blocked) { getMisses += 1;//update stats return null; } getHits += 1;//update stats return e; } finally { finishCacheRead(); } } /** * Flush all cache entries * @return command response */ public boolean flush_all() { return flush_all(0); } /** * Flush all cache entries with a timestamp after a given expiration time * @param expire the flush time in seconds * @return command response */ public boolean flush_all(int expire) { // TODO implement this, it isn't right... but how to handle efficiently? (don't want to linear scan entire cacheStorage) try { startCacheWrite(); this.cacheStorage.clear(); } finally { finishCacheWrite(); } return true; } /** * @return the current time in seconds (from epoch), used for expiries, etc. */ protected final int Now() { return (int) (System.currentTimeMillis() / 1000); } /** * Initialize all statistic counters */ protected void initStats() { currentItems = 0; totalItems = 0; getCmds = setCmds = getHits = getMisses = 0; } public Set<String> keys() { try { startCacheRead(); return cacheStorage.keys(); } finally { finishCacheRead(); } } public long getCurrentItems() { try { startCacheRead(); return this.cacheStorage.count(); } finally { finishCacheRead(); } } public long getLimitMaxBytes() { try { startCacheRead(); return this.cacheStorage.getMaximumSize(); } finally { finishCacheRead(); } } public long getCurrentBytes() { try { startCacheRead(); return this.cacheStorage.getSize(); } finally { finishCacheRead(); } } /** * Blocks of code in which the contents of the cache * are examined in any way must be surrounded by calls to <code>startRead</code> * and <code>finishRead</code>. See documentation for ReadWriteLock. */ private void startCacheRead() { cacheReadWriteLock.readLock().lock(); } /** * Blocks of code in which the contents of the cache * are examined in any way must be surrounded by calls to <code>startRead</code> * and <code>finishRead</code>. 
See documentation for ReadWriteLock. */ private void finishCacheRead() { cacheReadWriteLock.readLock().unlock(); } /** * Blocks of code in which the contents of the cache * are changed in any way must be surrounded by calls to <code>startWrite</code> and * <code>finishWrite</code>. See documentation for ReadWriteLock. * protect the higher layers from implementation details. */ private void startCacheWrite() { cacheReadWriteLock.writeLock().lock(); } /** * Blocks of code in which the contents of the cache * are changed in any way must be surrounded by calls to <code>startWrite</code> and * <code>finishWrite</code>. See documentation for ReadWriteLock. */ private void finishCacheWrite() { cacheReadWriteLock.writeLock().unlock(); } public int getTotalItems() { return totalItems; } public int getGetCmds() { return getCmds; } public int getSetCmds() { return setCmds; } public int getGetHits() { return getHits; } public int getGetMisses() { return getMisses; } }
Fix to logic error in ADD; new keys indicated with ADD were not being added.
jmemcached-core/src/main/java/com/thimbleware/jmemcached/Cache.java
Fix to logic error in ADD; new keys indicated with ADD were not being added.
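To make the fix above concrete: per the commit message, ADD was answering NOT_STORED for new keys because the existence check was inverted (the old contents show it storing only keys that already exist, i.e. REPLACE semantics). A minimal sketch of the corrected method under memcached ADD semantics, assuming the same Cache helpers (isThere, set, startCacheWrite/finishCacheWrite) shown in the old contents -- the exact post-fix body is not included in this record, so this is an illustration, not the committed code:

  /**
   * Add an element to the cache, but only if no live element already
   * exists under the key (memcached ADD semantics).
   *
   * @param e the element to add
   * @return the store response code
   */
  public StoreResponse add(MCElement e) {
    try {
      startCacheWrite();
      // inverted from the old contents: store new keys, refuse existing ones
      if (isThere(e.keystring)) return StoreResponse.NOT_STORED;
      else return set(e);
    } finally {
      finishCacheWrite();
    }
  }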
Java
apache-2.0
69e3edb1d8dfa0363ebe57565765a5f1f5973daf
0
joshelser/cosmos,joshelser/cosmos,joshelser/cosmos
package sorts.results.integration;

import java.io.File;
import java.util.Collection;
import java.util.List;

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.server.mini.MiniAccumuloCluster;
import org.apache.accumulo.server.mini.MiniAccumuloConfig;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mediawiki.xml.export_0.MediaWikiType;

import sorts.Sorting;
import sorts.impl.SortableResult;
import sorts.impl.SortingImpl;
import sorts.options.Defaults;
import sorts.options.Index;
import sorts.results.Column;
import sorts.results.QueryResult;
import sorts.results.SValue;
import sorts.results.impl.MultimapQueryResult;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * 
 */
@Category(IntegrationTests.class)
public class SortsIntegrationTest extends SortsIntegrationSetup {

  protected static MiniAccumuloCluster mac;
  protected static File macDir;

  @BeforeClass
  public static void createAccumuloCluster() throws Exception {
    macDir = File.createTempFile("miniaccumulocluster", null);
    Assert.assertTrue(macDir.delete());
    Assert.assertTrue(macDir.mkdir());
    macDir.deleteOnExit();

    MiniAccumuloConfig config = new MiniAccumuloConfig(macDir, "");
    config.setNumTservers(4);

    mac = new MiniAccumuloCluster(config);
    mac.start();

    ZooKeeperInstance zk = new ZooKeeperInstance(mac.getInstanceName(), mac.getZooKeepers());
    Connector c = zk.getConnector("root", new PasswordToken(""));

    // Add in auths for "en"
    c.securityOperations().changeUserAuthorizations("root", new Authorizations("en"));
  }

  @AfterClass
  public static void stopAccumuloCluster() throws Exception {
    mac.stop();
    FileUtils.deleteDirectory(macDir);
  }

  @Test
  public void test() throws Exception {
    // Cache all of the wikis -- multithreaded
    loadAllWikis();

    long start = System.currentTimeMillis();

    // These should all be cached
    Assert.assertNotNull(getWiki1());
    Assert.assertNotNull(getWiki2());
    Assert.assertNotNull(getWiki3());
    Assert.assertNotNull(getWiki4());
    Assert.assertNotNull(getWiki5());

    long end = System.currentTimeMillis();

    Assert.assertTrue((end - start) < 10000);
  }

  @Test
  public void testWiki1() throws Exception {
    // Get the same wiki 3 times
    List<Thread> threads = Lists.newArrayList();
    for (int i = 0; i < 3; i++) {
      threads.add(new Thread(new Runnable() {
        public void run() {
          try {
            getWiki1();
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }));
    }

    long start = System.currentTimeMillis();
    for (Thread t : threads) {
      t.start();
    }
    for (Thread t : threads) {
      t.join();
    }
    long end = System.currentTimeMillis();

    // We should only have to wait on one to parse the xml
    Assert.assertTrue((end - start) < 8000);
  }

  @Test
  public void wiki1Test() throws Exception {
    MediaWikiType wiki1 = getWiki1();
    List<QueryResult<?>> results = wikiToMultimap(wiki1);

    ZooKeeperInstance zk = new ZooKeeperInstance(mac.getInstanceName(), mac.getZooKeepers());
    Connector con = zk.getConnector("root", new PasswordToken(""));

    con.tableOperations().create(Defaults.DATA_TABLE);
    con.tableOperations().create(Defaults.METADATA_TABLE);

    SortableResult id = SortableResult.create(con, new Authorizations("en"),
        Sets.newHashSet(Index.define(PAGE_ID)));

    Sorting s = new SortingImpl();
    s.register(id);
    s.addResults(id, results);

    Column pageIdCol = Column.create(PAGE_ID);

    Iterable<MultimapQueryResult> newResults = s.fetch(id);
    Assert.assertNotNull(newResults);

    long count = 0;
    String prevPageId = "";
    for (MultimapQueryResult res : newResults) {
      Collection<SValue> pageIds = res.get(pageIdCol);
      String currPageId = null;
      for (SValue pageId : pageIds) {
        if (null == currPageId) {
          currPageId = pageId.value();
        }
        // If we have multiple pageIds for this record, choose the least one greater than the prev
        if (prevPageId.compareTo(pageId.value()) < 0 && pageId.value().compareTo(currPageId) < 0) {
          currPageId = pageId.value();
        }
      }
      count++;
    }

    Assert.assertEquals(wiki1.getPage().size(), count);
  }
}
src/test/java/sorts/results/integration/SortsIntegrationTest.java
package sorts.results.integration;

import java.io.File;
import java.util.Collection;
import java.util.List;

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.test.MiniAccumuloCluster;
import org.apache.accumulo.test.MiniAccumuloConfig;
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mediawiki.xml.export_0.MediaWikiType;

import sorts.Sorting;
import sorts.impl.SortableResult;
import sorts.impl.SortingImpl;
import sorts.options.Defaults;
import sorts.options.Index;
import sorts.results.Column;
import sorts.results.QueryResult;
import sorts.results.SValue;
import sorts.results.impl.MultimapQueryResult;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * 
 */
@Category(IntegrationTests.class)
public class SortsIntegrationTest extends SortsIntegrationSetup {

  protected static MiniAccumuloCluster mac;
  protected static File macDir;

  @BeforeClass
  public static void createAccumuloCluster() throws Exception {
    macDir = File.createTempFile("miniaccumulocluster", null);
    Assert.assertTrue(macDir.delete());
    Assert.assertTrue(macDir.mkdir());
    macDir.deleteOnExit();

    MiniAccumuloConfig config = new MiniAccumuloConfig(macDir, "");
    config.setNumTservers(4);

    mac = new MiniAccumuloCluster(config);
    mac.start();

    ZooKeeperInstance zk = new ZooKeeperInstance(mac.getInstanceName(), mac.getZooKeepers());
    Connector c = zk.getConnector("root", new PasswordToken(""));

    // Add in auths for "en"
    c.securityOperations().changeUserAuthorizations("root", new Authorizations("en"));
  }

  @AfterClass
  public static void stopAccumuloCluster() throws Exception {
    mac.stop();
    FileUtils.deleteDirectory(macDir);
  }

  @Test
  public void test() throws Exception {
    // Cache all of the wikis -- multithreaded
    loadAllWikis();

    long start = System.currentTimeMillis();

    // These should all be cached
    Assert.assertNotNull(getWiki1());
    Assert.assertNotNull(getWiki2());
    Assert.assertNotNull(getWiki3());
    Assert.assertNotNull(getWiki4());
    Assert.assertNotNull(getWiki5());

    long end = System.currentTimeMillis();

    Assert.assertTrue((end - start) < 10000);
  }

  @Test
  public void testWiki1() throws Exception {
    // Get the same wiki 3 times
    List<Thread> threads = Lists.newArrayList();
    for (int i = 0; i < 3; i++) {
      threads.add(new Thread(new Runnable() {
        public void run() {
          try {
            getWiki1();
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }));
    }

    long start = System.currentTimeMillis();
    for (Thread t : threads) {
      t.start();
    }
    for (Thread t : threads) {
      t.join();
    }
    long end = System.currentTimeMillis();

    // We should only have to wait on one to parse the xml
    Assert.assertTrue((end - start) < 8000);
  }

  @Test
  public void wiki1Test() throws Exception {
    MediaWikiType wiki1 = getWiki1();
    List<QueryResult<?>> results = wikiToMultimap(wiki1);

    ZooKeeperInstance zk = new ZooKeeperInstance(mac.getInstanceName(), mac.getZooKeepers());
    Connector con = zk.getConnector("root", new PasswordToken(""));

    con.tableOperations().create(Defaults.DATA_TABLE);
    con.tableOperations().create(Defaults.METADATA_TABLE);

    SortableResult id = SortableResult.create(con, new Authorizations("en"),
        Sets.newHashSet(Index.define(PAGE_ID)));

    Sorting s = new SortingImpl();
    s.register(id);
    s.addResults(id, results);

    Column pageIdCol = Column.create(PAGE_ID);

    Iterable<MultimapQueryResult> newResults = s.fetch(id);
    Assert.assertNotNull(newResults);

    long count = 0;
    String prevPageId = "";
    for (MultimapQueryResult res : newResults) {
      Collection<SValue> pageIds = res.get(pageIdCol);
      String currPageId = null;
      for (SValue pageId : pageIds) {
        if (null == currPageId) {
          currPageId = pageId.value();
        }
        // If we have multiple pageIds for this record, choose the least one greater than the prev
        if (prevPageId.compareTo(pageId.value()) < 0 && pageId.value().compareTo(currPageId) < 0) {
          currPageId = pageId.value();
        }
      }
      count++;
    }

    Assert.assertEquals(wiki1.getPage().size(), count);
  }
}
haha.. and then update these package names too
src/test/java/sorts/results/integration/SortsIntegrationTest.java
haha.. and then update these package names too
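For context, the only functional change in this record is the package move for the mini cluster classes (org.apache.accumulo.test to org.apache.accumulo.server.mini) plus the dropped ColumnVisibility import. A rough, self-contained sketch of the bring-up sequence those imports support, mirroring the test's @BeforeClass (the "root" user and empty password come from the record itself; the temp-directory handling is simplified, and the new package's API is assumed otherwise unchanged):

import java.io.File;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.server.mini.MiniAccumuloCluster;
import org.apache.accumulo.server.mini.MiniAccumuloConfig;

public class MiniClusterSketch {
  public static void main(String[] args) throws Exception {
    File dir = new File(System.getProperty("java.io.tmpdir"), "mac-sketch");
    dir.mkdirs();
    // Same sequence as the test: configure, start, then connect via ZooKeeper
    MiniAccumuloConfig config = new MiniAccumuloConfig(dir, ""); // empty root password, as in the record
    config.setNumTservers(4);
    MiniAccumuloCluster mac = new MiniAccumuloCluster(config);
    mac.start();
    ZooKeeperInstance zk = new ZooKeeperInstance(mac.getInstanceName(), mac.getZooKeepers());
    Connector c = zk.getConnector("root", new PasswordToken(""));
    System.out.println("connected as root to " + mac.getInstanceName());
    mac.stop();
  }
}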
Java
apache-2.0
960d20fdc73127b73123835b6f45f802d4942dc8
0
foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm
/**
 * @license
 * Copyright 2017 The FOAM Authors. All Rights Reserved.
 * http://www.apache.org/licenses/LICENSE-2.0
 */

package foam.core;

import javax.xml.stream.XMLStreamReader;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.Signature;
import java.security.SignatureException;

public abstract class AbstractIntPropertyInfo extends AbstractPropertyInfo {
  protected static final ThreadLocal<ByteBuffer> bb = new ThreadLocal<ByteBuffer>() {
    @Override
    protected ByteBuffer initialValue() {
      return ByteBuffer.wrap(new byte[4]);
    }

    @Override
    public ByteBuffer get() {
      ByteBuffer bb = super.get();
      bb.clear();
      return bb;
    }
  };

  public int compareValues(int o1, int o2) {
    return Integer.compare(o1, o2);
  }

  public Object fromString(String value) {
    return Integer.valueOf(value);
  }

  @Override
  public Object fromXML(X x, XMLStreamReader reader) {
    super.fromXML(x, reader);
    return Integer.valueOf(reader.getText());
  }

  @Override
  public void updateDigest(FObject obj, MessageDigest md) {
    if ( ! includeInDigest() ) return;
    int val = (int) get(obj);
    md.update((ByteBuffer) bb.get().putInt(val).flip());
  }

  @Override
  public void updateSignature(FObject obj, Signature sig) throws SignatureException {
    if ( ! includeInSignature() ) return;
    int val = (int) get(obj);
    sig.update((ByteBuffer) bb.get().putInt(val).flip());
  }

  public String getSQLType() {
    return "INT";
  }

  public Class getValueClass() {
    return int.class;
  }

  public int cast(Object o) {
    if ( o instanceof String ) return Integer.valueOf((String) o);
    return ((Number) o).intValue();
  }

  public Object get(Object o) {
    return get_(o);
  }

  protected abstract int get_(Object o);

  public int compare(Object o1, Object o2) {
    return foam.util.SafetyUtil.compare(get_(o1), get_(o2));
  }

  public int comparePropertyToObject(Object key, Object o) {
    return foam.util.SafetyUtil.compare(cast(key), get_(o));
  }

  public int comparePropertyToValue(Object key, Object value) {
    return foam.util.SafetyUtil.compare(cast(key), cast(value));
  }

  public foam.lib.parse.Parser jsonParser() {
    return foam.lib.json.IntParser.instance();
  }

  public foam.lib.parse.Parser queryParser() {
    return foam.lib.json.IntParser.instance();
  }

  public foam.lib.parse.Parser csvParser() {
    return foam.lib.json.IntParser.instance();
  }

  public boolean isDefaultValue(Object o) {
    return foam.util.SafetyUtil.compare(get_(o), 0) == 0;
  }

  public void format(foam.lib.formatter.FObjectFormatter formatter, foam.core.FObject obj) {
    formatter.output(get_(obj));
  }
}
src/foam/core/AbstractIntPropertyInfo.java
/**
 * @license
 * Copyright 2017 The FOAM Authors. All Rights Reserved.
 * http://www.apache.org/licenses/LICENSE-2.0
 */

package foam.core;

import javax.xml.stream.XMLStreamReader;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.Signature;
import java.security.SignatureException;

public abstract class AbstractIntPropertyInfo extends AbstractPropertyInfo {
  protected static final ThreadLocal<ByteBuffer> bb = new ThreadLocal<ByteBuffer>() {
    @Override
    protected ByteBuffer initialValue() {
      return ByteBuffer.wrap(new byte[4]);
    }

    @Override
    public ByteBuffer get() {
      ByteBuffer bb = super.get();
      bb.clear();
      return bb;
    }
  };

  public int compareValues(int o1, int o2) {
    return Integer.compare(o1, o2);
  }

  public Object fromString(String value) {
    return Integer.valueOf(value);
  }

  @Override
  public Object fromXML(X x, XMLStreamReader reader) {
    super.fromXML(x, reader);
    return Integer.valueOf(reader.getText());
  }

  @Override
  public void updateDigest(FObject obj, MessageDigest md) {
    if ( ! includeInDigest() ) return;
    int val = (int) get(obj);
    md.update((ByteBuffer) bb.get().putInt(val).flip());
  }

  @Override
  public void updateSignature(FObject obj, Signature sig) throws SignatureException {
    if ( ! includeInSignature() ) return;
    int val = (int) get(obj);
    sig.update((ByteBuffer) bb.get().putInt(val).flip());
  }

  public String getSQLType() {
    return "INT";
  }

  public Class getValueClass() {
    return int.class;
  }

  public int cast(Object o) {
    int i = ( o instanceof String ) ? Integer.valueOf((String) o) : (int) o;
    return ( o instanceof Number ) ? ((Number) o).intValue() : i;
  }

  public Object get(Object o) {
    return get_(o);
  }

  protected abstract int get_(Object o);

  public int compare(Object o1, Object o2) {
    return foam.util.SafetyUtil.compare(get_(o1), get_(o2));
  }

  public int comparePropertyToObject(Object key, Object o) {
    return foam.util.SafetyUtil.compare(cast(key), get_(o));
  }

  public int comparePropertyToValue(Object key, Object value) {
    return foam.util.SafetyUtil.compare(cast(key), cast(value));
  }

  public foam.lib.parse.Parser jsonParser() {
    return foam.lib.json.IntParser.instance();
  }

  public foam.lib.parse.Parser queryParser() {
    return foam.lib.json.IntParser.instance();
  }

  public foam.lib.parse.Parser csvParser() {
    return foam.lib.json.IntParser.instance();
  }

  public boolean isDefaultValue(Object o) {
    return foam.util.SafetyUtil.compare(get_(o), 0) == 0;
  }

  public void format(foam.lib.formatter.FObjectFormatter formatter, foam.core.FObject obj) {
    formatter.output(get_(obj));
  }
}
Change AbstractIntPropertyInfo.cast().
src/foam/core/AbstractIntPropertyInfo.java
Change AbstractIntPropertyInfo.cast().
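The change is easier to see with both versions side by side. A standalone demo (class and method names here are illustrative, not FOAM's) of why the old ternary-based cast() fails: `(int) o` unboxes through Integer, so any non-Integer Number such as a Long throws ClassCastException before the Number branch is ever consulted, while the new version parses Strings and routes everything else through Number.intValue():

public class CastDemo {
  // old behaviour: the ternary evaluates (int) o even for non-Integer Numbers
  static int castOld(Object o) {
    int i = (o instanceof String) ? Integer.valueOf((String) o) : (int) o; // throws for Long
    return (o instanceof Number) ? ((Number) o).intValue() : i;
  }

  // new behaviour: String is parsed, everything else goes through Number.intValue()
  static int castNew(Object o) {
    if (o instanceof String) return Integer.valueOf((String) o);
    return ((Number) o).intValue();
  }

  public static void main(String[] args) {
    System.out.println(castNew("42")); // 42
    System.out.println(castNew(42L));  // 42 -- Long handled via Number.intValue()
    try {
      castOld(42L);                    // ClassCastException: Long cannot be cast to Integer
    } catch (ClassCastException e) {
      System.out.println("old cast() failed: " + e);
    }
  }
}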
Java
apache-2.0
ffe3d08d2ee2f139645c07be662cd5d048214f07
0
conlini/elasticsearch-tmdb-river
package com.conlini.es.tmdb.river.core;

import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.river.AbstractRiverComponent;
import org.elasticsearch.river.River;
import org.elasticsearch.river.RiverName;
import org.elasticsearch.river.RiverSettings;
import org.springframework.http.MediaType;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJacksonHttpMessageConverter;
import org.springframework.web.client.RestTemplate;

import com.conlini.es.tmdb.river.pojo.DiscoverResponse;
import com.conlini.es.tmdb.river.pojo.DiscoverResult;
import com.conlini.es.tmdb.river.pojo.Movie;
import com.conlini.es.tmdb.river.pojo.SourceProvider;
import com.conlini.es.tmdb.river.pojo.TV;

public class TMDBRiver extends AbstractRiverComponent implements River {

  private Client client;
  private String apiKey;
  private final String basePath = "http://api.themoviedb.org/3";
  private Integer maxPages;
  private boolean lastPageFetched = false;

  private static enum DISCOVERY_TYPE {
    MOVIE("/discover/movie", "movie", "contents", Movie.class),
    TV("/discover/tv", "tv", "contents", TV.class);

    private final String path;
    private final String contentPath;
    private final String esType;
    private final Class<? extends SourceProvider> sourceClass;

    private DISCOVERY_TYPE(String path, String contentPath, String esType,
        Class<? extends SourceProvider> sourceClass) {
      this.path = path;
      this.esType = esType;
      this.contentPath = contentPath;
      this.sourceClass = sourceClass;
    }

    public String getPath() {
      return this.path;
    }

    public String getEsType() {
      return this.esType;
    }

    public String getContentPath() {
      return this.contentPath;
    }
  }

  private DISCOVERY_TYPE discoveryType = DISCOVERY_TYPE.MOVIE;

  private BlockingQueue<List<DiscoverResult>> queues = new ArrayBlockingQueue<List<DiscoverResult>>(1);

  @Inject
  protected TMDBRiver(RiverName riverName, RiverSettings settings, Client client) {
    super(riverName, settings);
    this.client = client;
    if (settings.settings().containsKey("api_key")) {
      this.apiKey = (String) settings.settings().get("api_key");
    }
    if (settings.settings().containsKey("discovery_type")) {
      String discovery_type = (String) settings.settings().get("discovery_type");
      if (discovery_type.equals("tv")) {
        discoveryType = DISCOVERY_TYPE.TV;
      } else if (discovery_type.equals("movie")) {
        discoveryType = DISCOVERY_TYPE.MOVIE;
      }
    }
    if (settings.settings().containsKey("max_pages")) {
      maxPages = (Integer) settings.settings().get("max_pages");
    }
    // Print all the settings that have been extracted. Assert that we
    // received the api key. Don't print it out for security reasons.
    logger.info(String.format("Received apiKey - %s", (null != apiKey && !apiKey.equals(""))));
    logger.info(String.format("Discovery Type = %s", discoveryType));
    logger.info("String max_pages - " + maxPages);
  }

  public RiverName riverName() {
    return this.riverName;
  }

  public void start() {
    logger.info(String.format("Starting %s river", riverName));
    if (null != apiKey && !apiKey.equals("")) {
      RestTemplate template = initTemplate();
      String fetchUrl = basePath + discoveryType.getPath() + "?api_key={api_key}&page={page_no}";
      DiscoverResponse response = template.getForObject(fetchUrl,
          DiscoverResponse.class, getVariableVals("1"));
      // Start 1 thread to get the remaining pages. add to a queue
      // start a thread that gets the Content
      logger.info(String.format(
          "Received response for %d content. Fetching %d pages ",
          response.getTotalResults(), response.getTotalPages()));
      ExecutorService service = Executors.newCachedThreadPool();
      ContentFetcher contentFetcher = new ContentFetcher(template);
      Future<Object> future = service.submit(contentFetcher);
      if (null == maxPages) {
        maxPages = response.getTotalPages();
      }
      PagesFetcher pagesFetcher = new PagesFetcher(maxPages, fetchUrl, template);
      service.submit(pagesFetcher);
      try {
        Object complete = future.get();
      } catch (InterruptedException e) {
        logger.error("Error", e);
      } catch (ExecutionException e) {
        logger.error("Error", e);
      }
    } else {
      logger.error("No API Key found. Nothing being pulled");
    }
    client.admin().indices().prepareDeleteMapping("_river")
        .setType(riverName.name()).execute();
  }

  private Map<String, ?> getVariableVals(String pageNum) {
    Map<String, Object> values = new HashMap<String, Object>();
    values.put("api_key", apiKey);
    values.put("page_no", pageNum);
    return values;
  }

  private RestTemplate initTemplate() {
    RestTemplate template = new RestTemplate();
    List<HttpMessageConverter<?>> convertors = template.getMessageConverters();
    MappingJacksonHttpMessageConverter converter = new MappingJacksonHttpMessageConverter();
    List<MediaType> mediaTypes = new ArrayList<MediaType>();
    mediaTypes.add(new MediaType("application", "json"));
    converter.setSupportedMediaTypes(mediaTypes);
    convertors.add(converter);
    template.setMessageConverters(convertors);
    return template;
  }

  public void close() {
  }

  public Client getClient() {
    return client;
  }

  public void setClient(Client client) {
    this.client = client;
  }

  private class PagesFetcher implements Callable<Object> {
    private Integer totalPages;
    private String fetchUrl;
    private RestTemplate template;

    public PagesFetcher(Integer totalPages, String fetchUrl, RestTemplate template) {
      super();
      this.totalPages = totalPages;
      this.fetchUrl = fetchUrl;
      this.template = template;
    }

    @Override
    public Object call() throws Exception {
      for (int i = 1; i < totalPages; i++) {
        logger.info("Fetching page no - " + i);
        DiscoverResponse response = template.getForObject(fetchUrl,
            DiscoverResponse.class, getVariableVals(i + ""));
        try {
          queues.offer(response.getResults(), 2, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
          logger.error("Failed to offer results to the queue", e);
        }
        try {
          Thread.sleep(3000);
        } catch (InterruptedException e) {
          logger.error("Error", e);
        }
      }
      lastPageFetched = true;
      return new Object();
    }
  }

  private class ContentFetcher implements Callable<Object> {
    private boolean running = true;
    private final String fetchUrl = basePath + "/{type}/{id}?api_key={api_key}";
    private RestTemplate template;

    public ContentFetcher(RestTemplate template) {
      super();
      this.template = template;
    }

    @SuppressWarnings("unchecked")
    private void fetchContents(List<DiscoverResult> results) {
      BulkRequestBuilder requestBuilder = client.prepareBulk();
      logger.info(String.format("Fetching movies - %s", results));
      for (DiscoverResult result : results) {
        SourceProvider sourceProvider = template.getForObject(fetchUrl,
            discoveryType.sourceClass, discoveryType.getContentPath(),
            result.getId().toString(), apiKey);
        try {
          requestBuilder.add(client.prepareIndex("tmdb",
              discoveryType.getEsType(), result.getId().toString())
              .setSource(sourceProvider.source()));
        } catch (IOException e) {
          logger.error("Error", e);
        } catch (ParseException e) {
          logger.error("Error", e);
        }
      }
      requestBuilder.execute().actionGet();
    }

    @Override
    public Object call() throws Exception {
      while (running) {
        try {
          List<DiscoverResult> results = queues.take();
          fetchContents(results);
          if (lastPageFetched && queues.isEmpty()) {
            // a very dirty way to end the fetch of data. We do this
            // as we need to now unregister the river for future
            // auto fetches.
            // FIXME need a cleaner sync between threads to do this
            running = false;
          }
        } catch (InterruptedException e) {
          logger.error("Failed to take next from queue", e);
          running = false;
        }
      }
      return new Object();
    }
  }
}
src/main/java/com/conlini/es/tmdb/river/core/TMDBRiver.java
package com.conlini.es.tmdb.river.core;

import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.river.AbstractRiverComponent;
import org.elasticsearch.river.River;
import org.elasticsearch.river.RiverName;
import org.elasticsearch.river.RiverSettings;
import org.springframework.http.MediaType;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJacksonHttpMessageConverter;
import org.springframework.web.client.RestTemplate;

import com.conlini.es.tmdb.river.pojo.DiscoverResponse;
import com.conlini.es.tmdb.river.pojo.DiscoverResult;
import com.conlini.es.tmdb.river.pojo.Movie;
import com.conlini.es.tmdb.river.pojo.SourceProvider;
import com.conlini.es.tmdb.river.pojo.TV;

public class TMDBRiver extends AbstractRiverComponent implements River {

  private Client client;
  private String apiKey;
  private final String basePath = "http://api.themoviedb.org/3";
  private Integer maxPages;

  private static enum DISCOVERY_TYPE {
    MOVIE("/discover/movie", "movie", "contents", Movie.class),
    TV("/discover/tv", "tv", "contents", TV.class);

    private final String path;
    private final String contentPath;
    private final String esType;
    private final Class<? extends SourceProvider> sourceClass;

    private DISCOVERY_TYPE(String path, String contentPath, String esType,
        Class<? extends SourceProvider> sourceClass) {
      this.path = path;
      this.esType = esType;
      this.contentPath = contentPath;
      this.sourceClass = sourceClass;
    }

    public String getPath() {
      return this.path;
    }

    public String getEsType() {
      return this.esType;
    }

    public String getContentPath() {
      return this.contentPath;
    }
  }

  private DISCOVERY_TYPE discoveryType = DISCOVERY_TYPE.MOVIE;

  private BlockingQueue<List<DiscoverResult>> queues = new ArrayBlockingQueue<List<DiscoverResult>>(1);

  @Inject
  protected TMDBRiver(RiverName riverName, RiverSettings settings, Client client) {
    super(riverName, settings);
    this.client = client;
    if (settings.settings().containsKey("api_key")) {
      this.apiKey = (String) settings.settings().get("api_key");
    }
    if (settings.settings().containsKey("discovery_type")) {
      String discovery_type = (String) settings.settings().get("discovery_type");
      if (discovery_type.equals("tv")) {
        discoveryType = DISCOVERY_TYPE.TV;
      } else if (discovery_type.equals("movie")) {
        discoveryType = DISCOVERY_TYPE.MOVIE;
      }
    }
    if (settings.settings().containsKey("max_pages")) {
      maxPages = (Integer) settings.settings().get("max_pages");
    }
    // Print all the settings that have been extracted. Assert that we
    // received the api key. Don't print it out for security reasons.
    logger.info(String.format("Received apiKey - %s", (null != apiKey && !apiKey.equals(""))));
    logger.info(String.format("Discovery Type = %s", discoveryType));
    logger.info("String max_pages - " + maxPages);
  }

  public RiverName riverName() {
    return this.riverName;
  }

  public void start() {
    logger.info(String.format("Starting %s river", riverName));
    if (null != apiKey && !apiKey.equals("")) {
      RestTemplate template = initTemplate();
      String fetchUrl = basePath + discoveryType.getPath() + "?api_key={api_key}&page={page_no}";
      DiscoverResponse response = template.getForObject(fetchUrl,
          DiscoverResponse.class, getVariableVals("1"));
      // Start 1 thread to get the remaining pages. add to a queue
      // start a thread that gets the Content
      logger.info(String.format(
          "Received response for %d content. Fetching %d pages ",
          response.getTotalResults(), response.getTotalPages()));
      ExecutorService service = Executors.newCachedThreadPool();
      ContentFetcher contentFetcher = new ContentFetcher(template);
      service.execute(contentFetcher);
      if (null == maxPages) {
        maxPages = response.getTotalPages();
      }
      PagesFetcher pagesFetcher = new PagesFetcher(maxPages, fetchUrl, template);
      service.execute(pagesFetcher);
    } else {
      logger.error("No API Key found. Nothing being pulled");
    }
  }

  private Map<String, ?> getVariableVals(String pageNum) {
    Map<String, Object> values = new HashMap<String, Object>();
    values.put("api_key", apiKey);
    values.put("page_no", pageNum);
    return values;
  }

  private RestTemplate initTemplate() {
    RestTemplate template = new RestTemplate();
    List<HttpMessageConverter<?>> convertors = template.getMessageConverters();
    MappingJacksonHttpMessageConverter converter = new MappingJacksonHttpMessageConverter();
    List<MediaType> mediaTypes = new ArrayList<MediaType>();
    mediaTypes.add(new MediaType("application", "json"));
    converter.setSupportedMediaTypes(mediaTypes);
    convertors.add(converter);
    template.setMessageConverters(convertors);
    return template;
  }

  public void close() {
  }

  public Client getClient() {
    return client;
  }

  public void setClient(Client client) {
    this.client = client;
  }

  private class PagesFetcher implements Runnable {
    private Integer totalPages;
    private String fetchUrl;
    private RestTemplate template;

    public PagesFetcher(Integer totalPages, String fetchUrl, RestTemplate template) {
      super();
      this.totalPages = totalPages;
      this.fetchUrl = fetchUrl;
      this.template = template;
    }

    public void run() {
      for (int i = 1; i < totalPages; i++) {
        logger.info("Fetching page no - " + i);
        DiscoverResponse response = template.getForObject(fetchUrl,
            DiscoverResponse.class, getVariableVals(i + ""));
        try {
          queues.offer(response.getResults(), 2, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
          logger.error("Failed to offer results to the queue", e);
        }
        try {
          Thread.sleep(3000);
        } catch (InterruptedException e) {
          logger.error("Error", e);
        }
      }
    }
  }

  private class ContentFetcher implements Runnable {
    private boolean running = true;
    private final String fetchUrl = basePath + "/{type}/{id}?api_key={api_key}";
    private RestTemplate template;

    public ContentFetcher(RestTemplate template) {
      super();
      this.template = template;
    }

    @Override
    public void run() {
      while (running) {
        try {
          List<DiscoverResult> results = queues.take();
          fetchContents(results);
        } catch (InterruptedException e) {
          logger.error("Failed to take next from queue", e);
          running = false;
        }
      }
    }

    @SuppressWarnings("unchecked")
    private void fetchContents(List<DiscoverResult> results) {
      BulkRequestBuilder requestBuilder = client.prepareBulk();
      logger.info(String.format("Fetching movies - %s", results));
      for (DiscoverResult result : results) {
        SourceProvider sourceProvider = template.getForObject(fetchUrl,
            discoveryType.sourceClass, discoveryType.getContentPath(),
            result.getId().toString(), apiKey);
        try {
          requestBuilder.add(client.prepareIndex("tmdb",
              discoveryType.getEsType(), result.getId().toString())
              .setSource(sourceProvider.source()));
        } catch (IOException e) {
          logger.error("Error", e);
        } catch (ParseException e) {
          logger.error("Error", e);
        }
      }
      requestBuilder.execute().actionGet();
    }
  }
}
unregister the river after completing the scrape
src/main/java/com/conlini/es/tmdb/river/core/TMDBRiver.java
unregister the river after completing the scrape
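The Runnable-to-Callable switch in this record is what makes the unregistration possible: submit() on a Callable returns a Future whose get() blocks start() until the content fetcher drains the queue, so cleanup can run afterwards. A minimal, self-contained sketch of that handoff pattern (all names here are illustrative, not the river's; termination via a poll timeout stands in for the river's lastPageFetched flag):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class FutureHandoffSketch {
  public static void main(String[] args) throws Exception {
    final BlockingQueue<Integer> queue = new ArrayBlockingQueue<Integer>(1);
    ExecutorService service = Executors.newCachedThreadPool();

    // consumer: a Callable so the caller can block on its completion
    Future<Object> consumerDone = service.submit(new Callable<Object>() {
      public Object call() throws Exception {
        Integer page;
        // give up once the producer has been quiet for two seconds
        while ((page = queue.poll(2, TimeUnit.SECONDS)) != null) {
          System.out.println("fetched content for page " + page);
        }
        return new Object();
      }
    });

    // producer: hand pages over one at a time, as PagesFetcher does
    for (int page = 1; page <= 3; page++) {
      queue.offer(page, 2, TimeUnit.SECONDS);
    }

    consumerDone.get(); // blocks until the consumer is done -- then clean up
    System.out.println("scrape complete; safe to unregister the river here");
    service.shutdown();
  }
}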